/*
** Copyright 2003, Travis Geiselbrecht. All rights reserved.
** Distributed under the terms of the NewOS License.
*/
#define FUNCTION(x) .global x; .type x,@function; x
#define LOCAL_FUNCTION(x) .type x,@function; x

/*
** General exception handling pseudocode:
**  save %r0 into sprg0
**  save %r2 into sprg1
**  save %r1 into sprg2
**  save cr into %r2 (the conditional check below clobbers cr)
**  load the saved msr into %r0
**  see if it was in kernel mode
**  if not,
**      load the kernel stack from EAR into %r1
**  restore cr from %r2
**  set up an identity mapping of 0x0 in BAT 0 (instruction and data)
**  load the old msr
**  merge the old msr mmu bits with the current msr
**  load the new msr (should turn the mmu on)
**  save lr into sprg3
**  set up and branch to the next instruction (moving the program counter into kernel space)
**  remove the BAT mapping
**  set up a stack frame and push everything
*/

#define VEC_ENTRY() \
    mtsprg0 %r0 ;   /* save %r0 */ \
    mtsprg1 %r2 ;   /* save %r2 */ \
    mtsprg2 %r1 ;   /* save the old stack */ \
    mfcr    %r2 ;   /* save cr */ \
\
    mfsrr1  %r0 ;   /* load the saved msr */ \
    andi.   %r0, %r0, (1 << 14) ;   /* check the PR bit: was it in kernel mode? */ \
    beq-    0f ;    /* it was, so keep the stack we already have */ \
\
    /* it came from user mode, load the kernel stack */ \
    mfear   %r1 ;   /* load the kernel stack pointer from the EAR reg */ \
0: \
    mtcrf   0xff, %r2 ; /* restore the CR, it was clobbered by the compare above */ \
\
    /* we are about to turn the mmu on, so put a BAT entry in place to keep us identity mapped */ \
    li      %r0, 0x2 ;  /* BATU_VS */ \
    mtibatu 0, %r0 ;    /* load the upper word of the instruction BAT */ \
    mtdbatu 0, %r0 ;    /* load the upper word of the data BAT */ \
    li      %r0, 0x10|0x2 ; /* BATL_MC | BATL_PP_RW */ \
    mtibatl 0, %r0 ;    /* load the lower word of the instruction BAT */ \
    mtdbatl 0, %r0 ;    /* load the lower word of the data BAT */ \
    isync ; \
    sync ; \
\
    /* turn the mmu back on */ \
    mfsrr1  %r0 ;   /* load the saved msr */ \
    rlwinm  %r0, %r0, 28, 30, 31 ;  /* extract the mmu (IR/DR) bits */ \
    mfmsr   %r2 ;   /* load the current msr */ \
    rlwimi  %r2, %r0, 4, 26, 27 ;   /* merge the mmu bits into the current msr */ \
    mtmsr   %r2 ;   /* load the new msr (turning the mmu back on) */ \
    isync ; \
\
    mflr    %r0 ;   /* load the lr */ \
    mtsprg  3, %r0 ;    /* save it */ \
    lis     %r0, 1f@h ; /* load the address of a label a few instructions ahead */ \
    ori     %r0, %r0, 1f@l ;    /* we will jump to it to get the program counter into the kernel region */ \
    mtlr    %r0 ;   /* get ready to jump to this label */ \
    blr ;   /* branch to the next instruction (with the mmu on) */ \
1: \
    /* turn the BAT back off */ \
    li      %r2, 0 ; \
    mtibatu 0, %r2 ; \
    mtdbatu 0, %r2 ; \
    mtibatl 0, %r2 ; \
    mtdbatl 0, %r2 ; \
    isync ; \
    sync ; \
\
    bl      __save_regs ;   /* dump an iframe on the stack */
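/* For reference, the rlwinm/rlwimi pair in the macro above is the only
** non-obvious bit twiddling here.  A rough C sketch of what it computes
** (not part of the build; MSR_IR/MSR_DR are simply our names for the 0x20
** and 0x10 relocate bits of the MSR):
**
**   #define MSR_IR 0x00000020    // instruction address translation enable
**   #define MSR_DR 0x00000010    // data address translation enable
**
**   unsigned int merge_mmu_bits(unsigned int cur_msr, unsigned int saved_srr1)
**   {
**       // keep the current MSR, but take IR/DR from the MSR that was live
**       // when the exception was taken (the copy sitting in SRR1)
**       return (cur_msr & ~(MSR_IR | MSR_DR)) | (saved_srr1 & (MSR_IR | MSR_DR));
**   }
*/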
.global __irqvec_start
__irqvec_start:
    .long   0

/* called by the tail end of the VEC_ENTRY macro
** register expectations:
**  %r1   - stack
**  sprg0 - old %r0
**  sprg1 - old %r2
**  sprg2 - old stack (%r1)
**  sprg3 - old lr
** all other regs should have been left unmodified by the exception handler,
** and are ready to be saved
*/
FUNCTION(__save_regs):
    mfsprg  %r0, 0
    stwu    %r0, -4(%r1)    /* push %r0 */
    mfsprg  %r0, 2
    stwu    %r0, -4(%r1)    /* push old %r1 (stack) */
    mfsprg  %r0, 1
    stwu    %r0, -4(%r1)    /* push %r2 */
    stwu    %r3, -4(%r1)    /* push %r3-%r31 */
    stwu    %r4, -4(%r1)    /* push %r3-%r31 */
    stwu    %r5, -4(%r1)    /* push %r3-%r31 */
    stwu    %r6, -4(%r1)    /* push %r3-%r31 */
    stwu    %r7, -4(%r1)    /* push %r3-%r31 */
    stwu    %r8, -4(%r1)    /* push %r3-%r31 */
    stwu    %r9, -4(%r1)    /* push %r3-%r31 */
    stwu    %r10, -4(%r1)   /* push %r3-%r31 */
    stwu    %r11, -4(%r1)   /* push %r3-%r31 */
    stwu    %r12, -4(%r1)   /* push %r3-%r31 */

    /* strictly speaking, we don't need to save %r13-%r31, but I will for now */
    stwu    %r13, -4(%r1)   /* push %r3-%r31 */
    stwu    %r14, -4(%r1)   /* push %r3-%r31 */
    stwu    %r15, -4(%r1)   /* push %r3-%r31 */
    stwu    %r16, -4(%r1)   /* push %r3-%r31 */
    stwu    %r17, -4(%r1)   /* push %r3-%r31 */
    stwu    %r18, -4(%r1)   /* push %r3-%r31 */
    stwu    %r19, -4(%r1)   /* push %r3-%r31 */
    stwu    %r20, -4(%r1)   /* push %r3-%r31 */
    stwu    %r21, -4(%r1)   /* push %r3-%r31 */
    stwu    %r22, -4(%r1)   /* push %r3-%r31 */
    stwu    %r23, -4(%r1)   /* push %r3-%r31 */
    stwu    %r24, -4(%r1)   /* push %r3-%r31 */
    stwu    %r25, -4(%r1)   /* push %r3-%r31 */
    stwu    %r26, -4(%r1)   /* push %r3-%r31 */
    stwu    %r27, -4(%r1)   /* push %r3-%r31 */
    stwu    %r28, -4(%r1)   /* push %r3-%r31 */
    stwu    %r29, -4(%r1)   /* push %r3-%r31 */
    stwu    %r30, -4(%r1)   /* push %r3-%r31 */
    stwu    %r31, -4(%r1)   /* push %r3-%r31 */

    /* save some of the other regs */
    mfctr   %r0
    stwu    %r0, -4(%r1)    /* push CTR */
    mfxer   %r0
    stwu    %r0, -4(%r1)    /* push XER */
    mfcr    %r0
    stwu    %r0, -4(%r1)    /* push CR */
    mfsprg  %r0, 3
    stwu    %r0, -4(%r1)    /* push LR */
    mfspr   %r0, %dsisr
    stwu    %r0, -4(%r1)    /* push DSISR */
    mfspr   %r0, %dar
    stwu    %r0, -4(%r1)    /* push DAR */
    mfspr   %r0, %srr1
    stwu    %r0, -4(%r1)    /* push SRR1 */
    mfspr   %r0, %srr0
    stwu    %r0, -4(%r1)    /* push SRR0 */

    addi    %r1, %r1, -8    /* adjust the stack pointer to leave some padding on it for C */

    /* get outta here */
    blr

/* not enough space for __restore_regs_and_rfi here, see below */
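/* For reference, a rough C view of the frame __save_regs leaves behind.  The
** struct and field names below are only illustrative (the kernel-side
** definition, if any, may differ); the layout follows directly from the push
** order above, lowest address first.  Note that %r1 is left pointing 8 bytes
** below this frame, which is why the vector stubs below pass "%r1 + 8" to
** ppc_exception_entry and why __restore_regs_and_rfi starts by adding 8 back.
**
**   struct iframe {                 // hypothetical name
**       unsigned long srr0;         // interrupted program counter
**       unsigned long srr1;         // interrupted MSR
**       unsigned long dar;
**       unsigned long dsisr;
**       unsigned long lr;
**       unsigned long cr;
**       unsigned long xer;
**       unsigned long ctr;
**       unsigned long gpr[29];      // gpr[0] = %r31 ... gpr[28] = %r3
**       unsigned long r2;
**       unsigned long r1;           // the interrupted stack pointer
**       unsigned long r0;
**   };
*/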
.skip   0x100 - (. - __irqvec_start)
FUNCTION(system_reset_exception):
    VEC_ENTRY();
    li      %r3, 0x100
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x200 - (. - __irqvec_start)
FUNCTION(machine_check_exception):
    VEC_ENTRY();
    li      %r3, 0x200
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x300 - (. - __irqvec_start)
FUNCTION(DSI_exception):
    VEC_ENTRY();
    li      %r3, 0x300
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x400 - (. - __irqvec_start)
FUNCTION(ISI_exception):
    VEC_ENTRY();
    li      %r3, 0x400
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x500 - (. - __irqvec_start)
FUNCTION(external_interrupt_exception):
    VEC_ENTRY();
    li      %r3, 0x500
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x600 - (. - __irqvec_start)
FUNCTION(alignment_exception):
    VEC_ENTRY();
    li      %r3, 0x600
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x700 - (. - __irqvec_start)
FUNCTION(program_exception):
    VEC_ENTRY();
    li      %r3, 0x700
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x800 - (. - __irqvec_start)
FUNCTION(FP_unavailable_exception):
    VEC_ENTRY();
    li      %r3, 0x800
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x900 - (. - __irqvec_start)
FUNCTION(decrementer_exception):
    VEC_ENTRY();
    li      %r3, 0x900
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

/* called at the tail end of each of the exception handlers.
** placed here to use the otherwise unused space between the
** 0x900 and 0xc00 vectors.
*/
FUNCTION(__restore_regs_and_rfi):
    addi    %r1, %r1, 8     /* adjust the stack pointer to get it back to the base of the iframe */

    lwz     %r0, 0(%r1)     /* SRR0 */
    mtspr   %srr0, %r0
    lwzu    %r0, 4(%r1)     /* SRR1 */
    mtspr   %srr1, %r0
    lwzu    %r0, 4(%r1)     /* DAR */
    mtspr   %dar, %r0
    lwzu    %r0, 4(%r1)     /* DSISR */
    mtspr   %dsisr, %r0
    lwzu    %r0, 4(%r1)     /* LR */
    mtlr    %r0
    lwzu    %r0, 4(%r1)     /* CR */
    mtcr    %r0
    lwzu    %r0, 4(%r1)     /* XER */
    mtxer   %r0
    lwzu    %r0, 4(%r1)     /* CTR */
    mtctr   %r0

    /* strictly speaking, we don't really need to have saved these regs */
    lwzu    %r31, 4(%r1)
    lwzu    %r30, 4(%r1)
    lwzu    %r29, 4(%r1)
    lwzu    %r28, 4(%r1)
    lwzu    %r27, 4(%r1)
    lwzu    %r26, 4(%r1)
    lwzu    %r25, 4(%r1)
    lwzu    %r24, 4(%r1)
    lwzu    %r23, 4(%r1)
    lwzu    %r22, 4(%r1)
    lwzu    %r21, 4(%r1)
    lwzu    %r20, 4(%r1)
    lwzu    %r19, 4(%r1)
    lwzu    %r18, 4(%r1)
    lwzu    %r17, 4(%r1)
    lwzu    %r16, 4(%r1)
    lwzu    %r15, 4(%r1)
    lwzu    %r14, 4(%r1)
    lwzu    %r13, 4(%r1)

    lwzu    %r12, 4(%r1)
    lwzu    %r11, 4(%r1)
    lwzu    %r10, 4(%r1)
    lwzu    %r9, 4(%r1)
    lwzu    %r8, 4(%r1)
    lwzu    %r7, 4(%r1)
    lwzu    %r6, 4(%r1)
    lwzu    %r5, 4(%r1)
    lwzu    %r4, 4(%r1)
    lwzu    %r3, 4(%r1)
    lwzu    %r2, 4(%r1)
    lwz     %r0, 8(%r1)
    lwz     %r1, 4(%r1)

    /* get out of here */
    rfi
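/* A quick sanity check of the vector padding arithmetic: the directive below,
** ".skip 0xc00 - (. - __irqvec_start)", pads with zero bytes so that
** system_call_exception lands exactly 0xc00 bytes past __irqvec_start, the
** architectural offset of the system call vector.  For example, if the code
** above happened to end at offset 0xb40 (a made-up number for illustration),
** the .skip would emit 0xc00 - 0xb40 = 0xc0 bytes of zeros.  The only
** invariant is that each block of code must end before the next vector's
** offset, which is why __restore_regs_and_rfi is parked in this gap and why
** the 0xf00 vector below gets special treatment.
*/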
.skip   0xc00 - (. - __irqvec_start)
FUNCTION(system_call_exception):
    VEC_ENTRY();
    li      %r3, 0xc00
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0xd00 - (. - __irqvec_start)
FUNCTION(trace_exception):
    VEC_ENTRY();
    li      %r3, 0xd00
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0xe00 - (. - __irqvec_start)
FUNCTION(FP_assist_exception):
    VEC_ENTRY();
    li      %r3, 0xe00
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0xf00 - (. - __irqvec_start)
FUNCTION(perf_monitor_exception):
    /* XXX deal with this, normal VEC_ENTRY code is too big to fit here */
    rfi

.skip   0xf20 - (. - __irqvec_start)
FUNCTION(altivec_unavailable_exception):
    VEC_ENTRY();
    li      %r3, 0xf20
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x1000 - (. - __irqvec_start)
FUNCTION(ITLB_miss_exception):
    VEC_ENTRY();
    li      %r3, 0x1000
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x1100 - (. - __irqvec_start)
FUNCTION(DTLB_miss_on_load_exception):
    VEC_ENTRY();
    li      %r3, 0x1100
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x1200 - (. - __irqvec_start)
FUNCTION(DTLB_miss_on_store_exception):
    VEC_ENTRY();
    li      %r3, 0x1200
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x1300 - (. - __irqvec_start)
FUNCTION(instruction_address_breakpoint_exception):
    VEC_ENTRY();
    li      %r3, 0x1300
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x1400 - (. - __irqvec_start)
FUNCTION(system_management_exception):
    VEC_ENTRY();
    li      %r3, 0x1400
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x1600 - (. - __irqvec_start)
FUNCTION(altivec_assist_exception):
    VEC_ENTRY();
    li      %r3, 0x1600
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.skip   0x1700 - (. - __irqvec_start)
FUNCTION(thermal_management_exception):
    VEC_ENTRY();
    li      %r3, 0x1700
    addi    %r4, %r1, 8
    bl      ppc_exception_entry
    bl      __restore_regs_and_rfi

.global __irqvec_end
__irqvec_end:
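/* Every vector stub above enters C the same way: the vector offset goes in
** %r3 and a pointer to the saved frame (%r1 + 8) goes in %r4, so the C entry
** point presumably looks something like the sketch below.  Only the name
** ppc_exception_entry is taken from this file; the signature and body are an
** illustrative guess, not the kernel's actual definition.
**
**   void ppc_exception_entry(int vector, struct iframe *frame)
**   {
**       // dispatch on the vector offset (0x100, 0x200, ..., 0x1700) and
**       // inspect or modify the saved state through *frame as needed
**   }
*/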