2 * arm exception handlers
6 #undef B /* B is for 'botch' */
9 * exception vectors, copied by trapinit() to somewhere useful
/*
 * The eight hardware exception vectors, in architectural order.
 * Each entry loads PC from the word 0x20 bytes past itself: ARM PC
 * reads as the current instruction address + 8, so 0x18(R15) is
 * entry+0x20, i.e. the matching slot of vtable(SB) directly below
 * (8 vectors * 4 bytes = 0x20).  trapinit() copies vectors and
 * vtable together to the hardware vector base, so the offsets hold
 * at the destination as well.
 */
11 TEXT vectors(SB), 1, $-4
12 MOVW 0x18(R15), R15 /* reset */
13 MOVW 0x18(R15), R15 /* undefined instr. */
14 MOVW 0x18(R15), R15 /* SWI & SMC */
15 MOVW 0x18(R15), R15 /* prefetch abort */
16 MOVW 0x18(R15), R15 /* data abort */
17 MOVW 0x18(R15), R15 /* hypervisor call */
18 MOVW 0x18(R15), R15 /* IRQ */
19 MOVW 0x18(R15), R15 /* FIQ */
/*
 * Handler addresses, one per vector above; each vector's PC-load
 * jumps through its slot here.  The reset entry is stored minus
 * KZERO because reset runs in the zero (physical) segment before
 * the MMU maps the kernel at KZERO (see _vrst below).
 */
21 TEXT vtable(SB), 1, $-4
22 WORD $_vrst-KZERO(SB) /* reset, in svc mode already */
23 WORD $_vund(SB) /* undefined, switch to svc mode */
24 WORD $_vsvc(SB) /* swi, in svc mode already */
25 WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
26 WORD $_vdabt(SB) /* data abort, switch to svc mode */
27 WORD $_vhype(SB) /* hypervisor call */
28 WORD $_virq(SB) /* IRQ, switch to svc mode */
29 WORD $_vfiq(SB) /* FIQ, switch to svc mode */
32 * reset - start additional cpus
34 TEXT _vrst(SB), 1, $-4
35 /* running in the zero segment (pc is lower 256MB) */
36 CPSMODE(PsrMsvc) /* should be redundant */
39 SETEND(0) /* force little-endian */
/*
 * Discard any stale instruction fetches from before reset.
 * CP15 c7 op: invalidate entire i-cache (and branch predictor,
 * per the comment below) before executing further kernel code.
 */
45 /* invalidate i-cache and branch-target cache */
46 MTCP CpSC, 0, PC, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
/* NOTE(review): remainder of the reset path (MMU enable, jump to
 * the KZERO segment, per-cpu startup) is elided here — confirm
 * against the full source before relying on this fragment. */
/*
 * SWI (system call) entry.  The cpu is already in SVC mode, so the
 * banked R13/R14 are the kernel's.  Build a Ureg on the SVC stack:
 * push pc, psr and type by hand, then store the user-bank R0-R14
 * (the .S suffix selects the user-mode register bank) below them.
 */
56 TEXT _vsvc(SB), 1, $-4 /* SWI */
59 /* stack is m->stack */
60 MOVW.W R14, -4(R13) /* ureg->pc = interrupted PC */
61 MOVW SPSR, R14 /* ureg->psr = SPSR */
62 MOVW.W R14, -4(R13) /* ... */
63 MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
64 MOVW.W R14, -4(R13) /* ... */
/*
 * Store R0-R14 without writeback, then adjust R13 separately, so
 * the stored value of R13 itself is well defined (see note below).
 */
66 /* avoid the ambiguity described in notes/movm.w. */
67 MOVM.DB.S [R0-R14], (R13) /* save user level registers */
68 SUB $(NREGS*4), R13 /* r13 now points to ureg */
70 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
73 * set up m and up registers since user registers could contain anything
/* NOTE(review): R1 presumably holds the cpu id here, loaded by an
 * elided instruction just above — confirm against the full source. */
76 SLL $2, R1 /* convert to word index */
77 MOVW $machaddr(SB), R2
79 MOVW (R2), R(MACH) /* m = machaddr[cpuid] */
/* .EQ condition set by an elided test of R(MACH) against 0 */
81 MOVW.EQ $MACHADDR, R0 /* paranoia: use MACHADDR if 0 */
82 MOVW 8(R(MACH)), R(USER) /* up = m->proc */
84 MOVW ((NREGS+1)*4)(R13), R2 /* saved SPSR (user mode) */
86 MOVW R13, R0 /* first arg is pointer to ureg */
87 SUB $8, R13 /* space for argument+link */
/* NOTE(review): the BL to the syscall handler is elided here. */
91 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
92 * may have been trashed when we get here.
95 MOVW $setR12(SB), R12 /* reload kernel's SB */
97 ADD $(8+4*NREGS), R13 /* make r13 point to ureg->type */
/* unwind: fetch return pc and saved psr, then fall into rfue below */
99 MOVW 8(R13), R14 /* restore link */
100 MOVW 4(R13), R0 /* restore SPSR */
102 * return from user-mode exception.
103 * expects new SPSR in R0. R13 must point to ureg->type.
106 TEXT rfue(SB), 1, $-4
107 MOVW R0, SPSR /* ... */
/*
 * Rearrange the top of the ureg in place: RFEV7 consumes pc then
 * psr, but the ureg stacks them the other way around.  The type
 * slot (and the word below) are dead at this point, so they are
 * reused as scratch.
 */
110 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
111 * step on type and previous word to hold temporary values.
112 * we could instead change the order in which psr & pc are pushed.
114 MOVW 4(R13), R1 /* psr */
115 MOVW 8(R13), R2 /* pc */
116 MOVW R2, 4(R13) /* pc */
117 MOVW R1, 8(R13) /* psr */
/* .S: load into the user-mode register bank, not SVC's */
119 MOVM.DB.S (R13), [R0-R14] /* restore user registers */
120 ADD $4, R13 /* pop type, sp -> pc */
/* NOTE(review): the concluding RFE/RFEV7 is elided from this view. */
/*
 * Undefined-instruction entry (UND mode).  Park R0-R4 on this
 * mode's stack (no writeback) to get scratch registers, then — in
 * elided code — set the trap type in R0 and join the common
 * mode-switch path below.
 */
124 TEXT _vund(SB), 1, $-4 /* undefined */
126 MOVM.IA [R0-R4], (R13) /* free some working space */
/*
 * Prefetch-abort entry (ABT mode).  Same pattern as _vund: stash
 * R0-R4, set the trap type, fall into the common path.
 */
130 TEXT _vpabt(SB), 1, $-4 /* prefetch abort */
132 MOVM.IA [R0-R4], (R13) /* free some working space */
133 MOVW $PsrMabt, R0 /* r0 = type */
/*
 * Data-abort entry (ABT mode).  Type is PsrMabt+1 to distinguish
 * data aborts from prefetch aborts (which use plain PsrMabt).
 */
136 TEXT _vdabt(SB), 1, $-4 /* data abort */
138 MOVM.IA [R0-R4], (R13) /* free some working space */
139 MOVW $(PsrMabt+1), R0 /* r0 = type */
/*
 * IRQ entry (IRQ mode).  Stash R0-R4, set the trap type, fall
 * into the common mode-switch path below.
 */
142 TEXT _virq(SB), 1, $-4 /* IRQ */
144 MOVM.IA [R0-R4], (R13) /* free some working space */
145 MOVW $PsrMirq, R0 /* r0 = type */
/*
 * Common trap path for _vund/_vpabt/_vdabt/_virq.  Entered in the
 * exception's own mode; captures the exception state in R1-R3,
 * switches to SVC mode, then builds a Ureg on the SVC stack and
 * calls trap().  R3 keeps a pointer back to the previous mode's
 * stack so the parked [R0-R4] can be recovered after the switch.
 */
149 * come here with type in R0 and R13 pointing above saved [r0-r4].
150 * we'll switch to SVC mode and then call trap.
153 // TEXT _vswtch(SB), 1, $-4 /* make symbol visible to debuggers */
156 MOVW SPSR, R1 /* save SPSR for ureg */
158 * R12 needs to be set before using PsrMbz, so BIGENDCHECK code has
161 MOVW R14, R2 /* save interrupted pc for ureg */
162 MOVW R13, R3 /* save pointer to where the original [R0-R4] are */
165 * switch processor to svc mode. this switches the banked registers
166 * (r13 [sp] and r14 [link]) to those of svc mode (so we must be sure
167 * to never get here already in svc mode).
169 CPSMODE(PsrMsvc) /* switch! */
/* low 4 bits of the saved PSR: 0 means the trap came from user mode */
172 AND.S $0xf, R1, R4 /* interrupted code kernel or user? */
/* NOTE(review): the branch to the user-mode path (below) on .EQ is
 * elided from this view; what follows is the kernel (SVC) case. */
176 * here for trap from SVC mode
179 /* push ureg->{type, psr, pc} onto Msvc stack.
180 * r13 points to ureg->type after.
182 MOVM.DB.W [R0-R2], (R13)
183 MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
186 * avoid the ambiguity described in notes/movm.w.
187 * In order to get a predictable value in R13 after the stores,
188 * separate the store-multiple from the stack-pointer adjustment.
189 * We'll assume that the old value of R13 should be stored on the stack.
191 /* save kernel level registers, at end r13 points to ureg */
192 MOVM.DB [R0-R14], (R13)
193 SUB $(NREGS*4), R13 /* SP now points to saved R0 */
195 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
196 /* previous mode was svc, so the saved spsr should be sane. */
197 MOVW ((NREGS+1)*4)(R13), R1
199 MOVM.IA (R13), [R0-R8] /* restore a few user registers */
201 MOVW R13, R0 /* first arg is pointer to ureg */
202 SUB $(4*2), R13 /* space for argument+link (for debugger) */
203 MOVW $0xdeaddead, R11 /* marker */
205 BL trap(SB) /* trap(ureg) */
207 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
208 * may have been trashed when we get here.
211 MOVW $setR12(SB), R12 /* reload kernel's SB */
213 ADD $(4*2+4*NREGS), R13 /* make r13 point to ureg->type */
216 * if we interrupted a previous trap's handler and are now
217 * returning to it, we need to propagate the current R(MACH) (R10)
218 * by overriding the saved one on the stack, since we may have
219 * been rescheduled and be on a different processor now than
222 MOVW R(MACH), (-(NREGS-MACH)*4)(R13) /* restore current cpu's MACH */
224 MOVW 8(R13), R14 /* restore link */
225 MOVW 4(R13), R0 /* restore SPSR */
227 /* return from kernel-mode exception */
228 MOVW R0, SPSR /* ... */
/*
 * Same pc/psr swap as in rfue: put them in the order RFEV7 wants,
 * reusing the dead type slot and the word below as scratch.
 */
231 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
232 * step on type and previous word to hold temporary values.
233 * we could instead change the order in which psr & pc are pushed.
235 MOVW 4(R13), R1 /* psr */
236 MOVW 8(R13), R2 /* pc */
237 MOVW R2, 4(R13) /* pc */
238 MOVW R1, 8(R13) /* psr */
240 /* restore kernel regs other than SP; we're using it */
242 MOVM.IA.W (R13), [R0-R12]
243 ADD $4, R13 /* skip saved kernel SP */
244 MOVM.IA.W (R13), [R14]
245 ADD $4, R13 /* pop type, sp -> pc */
/* NOTE(review): the concluding RFE/RFEV7 is elided from this view. */
/*
 * User-mode arm of the common trap path: same Ureg construction as
 * the SVC case above, except the .S suffix on the store-multiple
 * saves the user-bank R0-R14, and m/up must be re-derived since
 * user registers could contain anything.
 */
250 * here for trap from USER mode
253 MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
254 MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
256 /* avoid the ambiguity described in notes/movm.w. */
257 MOVM.DB.S [R0-R14], (R13) /* save kernel level registers */
258 SUB $(NREGS*4), R13 /* r13 now points to ureg */
260 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
263 * set up m and up registers since user registers could contain anything
/* NOTE(review): R1 presumably holds the cpu id here, loaded by an
 * elided instruction just above — confirm against the full source. */
266 SLL $2, R1 /* convert to word index */
267 MOVW $machaddr(SB), R2
269 MOVW (R2), R(MACH) /* m = machaddr[cpuid] */
/* .EQ condition set by an elided test of R(MACH) against 0 */
271 MOVW.EQ $MACHADDR, R0 /* paranoia: use MACHADDR if 0 */
272 MOVW 8(R(MACH)), R(USER) /* up = m->proc */
274 MOVW ((NREGS+1)*4)(R13), R2 /* saved SPSR */
276 MOVW R13, R0 /* first arg is pointer to ureg */
277 SUB $(4*2), R13 /* space for argument+link (for debugger) */
279 BL trap(SB) /* trap(ureg) */
281 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
282 * may have been trashed when we get here.
285 ADD $(4*2+4*NREGS), R13 /* make r13 point to ureg->type */
287 MOVW 8(R13), R14 /* restore link */
288 MOVW 4(R13), R0 /* restore SPSR */
/* NOTE(review): a line between the two "restore SPSR" loads is
 * elided (likely a branch/label); the repetition below is probably
 * two distinct paths, not a bug — confirm against the full source.
 * Control presumably continues into rfue with R0 = new SPSR. */
290 MOVW 4(R13), R0 /* restore SPSR */
/*
 * FIQ entry: not used by this kernel, so just return to the
 * interrupted code without building a Ureg or calling trap().
 */
294 TEXT _vfiq(SB), 1, $-4 /* FIQ */
299 RFE /* FIQ is special, ignore it for now */
/* hypervisor-call entry; body elided from this view — see vtable */
301 TEXT _vhype(SB), 1, $-4
309 * set the stack value for the mode passed in R0
/*
 * setr13(mode, sp): briefly switch CPSR to the given mode (with
 * IRQ/FIQ masked so nothing fires while the banked SP is invalid),
 * install the new banked R13, return the old one, and restore the
 * caller's mode.  R2 presumably holds the entry CPSR, read by an
 * elided MRS above; R1 holds the new sp (second argument) — confirm
 * against the full source.
 */
311 TEXT setr13(SB), 1, $-4
315 BIC $(PsrMask|PsrMbz), R2, R3 /* clear mode and must-be-zero bits */
316 ORR $(PsrDirq|PsrDfiq), R3 /* mask interrupts during the switch */
/* NOTE(review): the OR of the target mode (from R0) into R3 is
 * elided here. */
319 MOVW R3, CPSR /* switch to new mode */
321 MOVW R13, R0 /* return old sp */
322 MOVW R1, R13 /* install new one */
324 MOVW R2, CPSR /* switch back to old mode */