4 * Entered here from Compaq's bootldr with MMU disabled.
/*
 * NOTE(review): this is an elided extract of a Plan 9 ARM (SA-1100) kernel
 * l.s.  The embedded numbers are original line numbers and are
 * non-contiguous; the TEXT directive for this boot entry, and several of
 * its instructions, fall in elided lines.
 *
 * Boot entry: load the static base, assemble an SVC-mode PSR with
 * interrupts masked, program the MMU control register, flush caches,
 * drain the write buffer, and set up a kernel stack at the top of the
 * Mach page before entering C.
 */
7 MOVW $setR12(SB), R12 /* load the SB */
9 /* SVC mode, interrupts disabled */
10 MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
/* presumably R1 is written to CPSR and then reloaded with a control-register
 * value in the elided lines before this MCR — TODO confirm */
15 MCR CpMMU, 0, R1, C(CpControl), C(0x0)
18 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0 /* invalidate i&d caches */
25 /* drain write buffer */
26 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
28 MOVW $(MACHADDR+BY2PG), R13 /* stack */
29 SUB $4, R13 /* link */
32 /* we shouldn't get here */
35 BL _div(SB) /* hack to get _div etc loaded */
/* invalidate the entire TLB (the RET is in elided lines) */
38 TEXT mmuinvalidate(SB), $-4
39 MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7) /* flush i&d TLBs */
/* invalidate the TLB entry for the virtual address passed in R0 */
43 TEXT mmuinvalidateaddr(SB), $-4
44 MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x6), 1 /* flush single d-TLB entry */
47 /* write back and invalidate i and d caches */
48 TEXT cacheflush(SB), $-4
49 /* write back any dirty data */
/* the write-back loop itself is in elided lines */
57 /* drain write buffer and invalidate i&d cache contents */
58 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4 /* drain write buffer */
59 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0 /* invalidate i&d caches */
68 /* write back d cache */
/* NOTE(review): the TEXT directive (presumably cachewb) and the
 * write-back loop are in elided lines — confirm against the full file */
70 /* write back any dirty data */
79 /* drain write buffer */
80 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
83 /* write back a single cache line */
84 TEXT cachewbaddr(SB), $-4
/* R0 = address; any alignment of R0 happens in elided lines */
86 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1 /* clean d-cache line at R0 */
89 /* write back a region of cache lines */
90 TEXT cachewbregion(SB), $-4
/* loop control (advance R0 by one cache line, test against region end)
 * is in elided lines */
97 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1 /* clean d-cache line at R0 */
103 /* invalidate the dcache */
104 TEXT dcacheinvalidate(SB), $-4
/* invalidate only — dirty lines are discarded, not written back */
105 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x6)
108 /* invalidate the icache */
109 TEXT icacheinvalidate(SB), $-4
110 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x9)
113 /* drain write buffer */
114 TEXT wbflush(SB), $-4
/* stalls until all buffered stores have reached memory */
116 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
/* return the coprocessor CPU-identification register in R0 */
120 TEXT getcpuid(SB), $-4
121 MRC CpMMU, 0, R0, C(CpCPUID), C(0x0) /* R0 = CPU ID (return value) */
124 /* return fault status */
/* NOTE(review): TEXT directive (presumably getfsr) is in elided lines */
126 MRC CpMMU, 0, R0, C(CpFSR), C(0x0) /* R0 = fault status register */
129 /* return fault address */
/* NOTE(review): TEXT directive (presumably getfar) is in elided lines */
131 MRC CpMMU, 0, R0, C(CpFAR), C(0x0) /* R0 = fault address register */
134 /* return fault address */
/* NOTE(review): duplicate of the CpFAR reader above; its TEXT directive
 * is in elided lines — confirm which function this MRC belongs to */
136 MRC CpMMU, 0, R0, C(CpFAR), C(0x0)
139 /* set the translation table base */
/* NOTE(review): TEXT directive (presumably putttb) is in elided lines;
 * R0 = physical address of the level-1 translation table */
141 MCR CpMMU, 0, R0, C(CpTTB), C(0x0)
145 * enable mmu, i and d caches
147 TEXT mmuenable(SB), $-4
148 MRC CpMMU, 0, R0, C(CpControl), C(0x0) /* read current control register */
149 ORR $(CpCmmuena|CpCdcache|CpCicache|CpCwb), R0 /* set MMU, d/i cache, write-buffer enables */
150 MCR CpMMU, 0, R0, C(CpControl), C(0x0) /* write it back */
/* disable the MMU, both caches, the write buffer and mapped vectors */
153 TEXT mmudisable(SB), $-4
154 MRC CpMMU, 0, R0, C(CpControl), C(0x0) /* read current control register */
155 BIC $(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCvivec), R0 /* clear the enables */
156 MCR CpMMU, 0, R0, C(CpControl), C(0x0) /* write it back */
160 * use exception vectors at 0xffff0000
162 TEXT mappedIvecEnable(SB), $-4
163 MRC CpMMU, 0, R0, C(CpControl), C(0x0) /* read control register */
/* the ORR that sets the high-vectors bit is in elided lines */
165 MCR CpMMU, 0, R0, C(CpControl), C(0x0) /* write it back */
/* revert to exception vectors at address 0 */
167 TEXT mappedIvecDisable(SB), $-4
168 MRC CpMMU, 0, R0, C(CpControl), C(0x0) /* read control register */
/* the BIC that clears the high-vectors bit is in elided lines */
170 MCR CpMMU, 0, R0, C(CpControl), C(0x0) /* write it back */
173 /* set the domain access control register (previous comment said "set the translation table base" — copy/paste error: this writes CpDAC, not CpTTB) */
/* NOTE(review): TEXT directive (presumably putdac) is in elided lines */
175 MCR CpMMU, 0, R0, C(CpDAC), C(0x0)
178 /* set address translation pid */
/* NOTE(review): TEXT directive (presumably putpid) is in elided lines */
180 MCR CpMMU, 0, R0, C(CpPID), C(0x0)
184 * set the stack value for the mode passed in R0
201 * exception vectors, copied by trapinit() to somewhere useful
204 TEXT vectors(SB), $-4
/* each entry loads PC indirectly through the word table that follows;
 * 0x18 plus the 8-byte PC prefetch reaches the matching vtable slot */
205 MOVW 0x18(R15), R15 /* reset */
206 MOVW 0x18(R15), R15 /* undefined */
207 MOVW 0x18(R15), R15 /* SWI */
208 MOVW 0x18(R15), R15 /* prefetch abort */
209 MOVW 0x18(R15), R15 /* data abort */
210 MOVW 0x18(R15), R15 /* reserved */
211 MOVW 0x18(R15), R15 /* IRQ */
212 MOVW 0x18(R15), R15 /* FIQ */
/* NOTE(review): TEXT directive (presumably vtable) is in elided lines;
 * these words are the handler addresses loaded by vectors above */
215 WORD $_vsvc(SB) /* reset, in svc mode already */
216 WORD $_vund(SB) /* undefined, switch to svc mode */
217 WORD $_vsvc(SB) /* swi, in svc mode already */
218 WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
219 WORD $_vdabt(SB) /* data abort, switch to svc mode */
220 WORD $_vsvc(SB) /* reserved */
221 WORD $_virq(SB) /* IRQ, switch to svc mode */
222 WORD $_vfiq(SB) /* FIQ, switch to svc mode */
/*
 * System-call trap (SWI arrives already in SVC mode): build a Ureg on
 * the stack — pc, psr, type, then the caller's r0-r14 — hand it to C
 * (the call is in elided lines), then unwind and return to the
 * interrupted context.
 */
227 TEXT _vsvc(SB), $-4 /* SWI */
228 MOVW.W R14, -4(R13) /* ureg->pc = interrupted PC */
229 MOVW SPSR, R14 /* ureg->psr = SPSR */
230 MOVW.W R14, -4(R13) /* ... */
231 MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
232 MOVW.W R14, -4(R13) /* ... */
233 MOVM.DB.W.S [R0-R14], (R13) /* save user level registers (.S = user bank), at end r13 points to ureg */
234 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
235 MOVW R13, R0 /* first arg is pointer to ureg */
236 SUB $8, R13 /* space for argument+link */
/* the BL into C is in elided lines; return path follows */
240 ADD $(8+4*15), R13 /* make r13 point to ureg->type */
241 MOVW 8(R13), R14 /* restore link */
242 MOVW 4(R13), R0 /* restore SPSR */
243 MOVW R0, SPSR /* ... */
244 MOVM.DB.S (R13), [R0-R14] /* restore registers (user bank) */
245 ADD $8, R13 /* pop past ureg->{type+psr} */
246 RFE /* MOVM.IA.S.W (R13), [R15] */
248 TEXT _vund(SB), $-4 /* undefined */
249 MOVM.IA [R0-R4], (R13) /* free some working space */
/* presumably sets the trap type and joins the common path (elided lines) */
253 TEXT _vpabt(SB), $-4 /* prefetch abort */
254 MOVM.IA [R0-R4], (R13) /* free some working space */
255 MOVW $PsrMabt, R0 /* r0 = type */
/* branch to the common trap path is in elided lines */
258 TEXT _vdabt(SB), $-4 /* data abort (previous comment said "prefetch abort" — copy/paste error; name and type below show data abort) */
259 MOVM.IA [R0-R4], (R13) /* free some working space */
260 MOVW $(PsrMabt+1), R0 /* r0 = type (PsrMabt+1 distinguishes data from prefetch abort) */
/* branch to the common trap path is in elided lines */
263 TEXT _virq(SB), $-4 /* IRQ */
264 MOVM.IA [R0-R4], (R13) /* free some working space */
265 MOVW $PsrMirq, R0 /* r0 = type */
/* falls into the common trap path (elided lines) */
269 * come here with type in R0 and R13 pointing above saved [r0-r4]
270 * and type in r0. we'll switch to SVC mode and then call trap.
/* NOTE(review): TEXT directive (presumably _vswitch) is in elided lines */
273 MOVW SPSR, R1 /* save SPSR for ureg */
274 MOVW R14, R2 /* save interrupted pc for ureg */
275 MOVW R13, R3 /* save pointer to where the original [R0-R3] are */
277 /* switch to svc mode */
/* the CPSR read and write around this ORR are in elided lines */
280 ORR $(PsrDirq|PsrDfiq|PsrMsvc), R14
283 /* interrupted code kernel or user? */
287 /* here for trap from SVC mode */
288 MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
289 MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
290 MOVM.DB.W [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
291 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
292 MOVW R13, R0 /* first arg is pointer to ureg */
293 SUB $8, R13 /* space for argument+link (for debugger) */
294 MOVW $0xdeaddead,R11 /* marker */
/* the BL to trap() is in elided lines */
298 ADD $(8+4*15), R13 /* make r13 point to ureg->type */
299 MOVW 8(R13), R14 /* restore link */
300 MOVW 4(R13), R0 /* restore SPSR */
301 MOVW R0, SPSR /* ... */
302 MOVM.DB (R13), [R0-R14] /* restore registers (no .S: current, kernel bank) */
303 ADD $8, R13 /* pop past ureg->{type+psr} */
304 RFE /* MOVM.IA.S.W (R13), [R15] */
306 /* here for trap from USER mode */
308 MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
309 MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
310 MOVM.DB.W.S [R0-R14], (R13) /* save user-bank registers (.S), at end r13 points to ureg */
311 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
312 MOVW R13, R0 /* first arg is pointer to ureg */
313 SUB $8, R13 /* space for argument+link (for debugger) */
/* the BL to trap() is in elided lines */
317 ADD $(8+4*15), R13 /* make r13 point to ureg->type */
318 MOVW 8(R13), R14 /* restore link */
319 MOVW 4(R13), R0 /* restore SPSR */
320 MOVW R0, SPSR /* ... */
321 MOVM.DB.S (R13), [R0-R14] /* restore registers (user bank) */
322 ADD $8, R13 /* pop past ureg->{type+psr} */
323 RFE /* MOVM.IA.S.W (R13), [R15] */
325 TEXT _vfiq(SB), $-4 /* FIQ */
326 RFE /* FIQ is special, ignore it for now */
329 * This is the first jump from kernel to user mode.
330 * Fake a return from interrupt.
332 * Enter with R0 containing the user stack pointer.
333 * UTZERO + 0x20 is always the entry point.
/* NOTE(review): TEXT directive (presumably touser) is in elided lines */
337 /* store the user stack pointer into the USR_r13 */
338 MOVM.DB.W [R0], (R13) /* push R0 (user SP) onto the kernel stack */
339 MOVM.S.IA.W (R13),[R13] /* pop it into the user-bank R13 (.S) */
341 /* set up a PSR for user level */
/* PSR construction and the SPSR write are in elided lines */
345 /* save the PC on the stack */
346 MOVW $(UTZERO+0x20), R0 /* user entry point */
349 /* return from interrupt */
350 RFE /* MOVM.IA.S.W (R13), [R15] */
353 * here to jump to a newly forked process
/* NOTE(review): TEXT directive (presumably forkret) is in elided lines.
 * Unwinds the child's Ureg exactly like the user-mode trap return path. */
356 ADD $(4*15), R13 /* make r13 point to ureg->type */
357 MOVW 8(R13), R14 /* restore link */
358 MOVW 4(R13), R0 /* restore SPSR */
359 MOVW R0, SPSR /* ... */
360 MOVM.DB.S (R13), [R0-R14] /* restore registers (user bank) */
361 ADD $8, R13 /* pop past ureg->{type+psr} */
362 RFE /* MOVM.IA.S.W (R13), [R15] */
/* NOTE(review): TEXT directive (presumably splhi) is in elided lines */
365 /* save caller pc in Mach */
366 MOVW $(MACHADDR+0x04),R2 /* R2 = Mach+4 — presumably m->splpc; confirm Mach layout */
368 /* turn off interrupts */
370 ORR $(PsrDfiq|PsrDirq), R0, R1 /* R1 = PSR with FIQ and IRQ masked */
/* NOTE(review): TEXT directive (presumably spllo) is in elided lines */
376 BIC $(PsrDfiq|PsrDirq), R0, R1 /* R1 = PSR with FIQ and IRQ unmasked */
/* NOTE(review): TEXT directive (presumably splx) is in elided lines */
381 /* save caller pc in Mach */
382 MOVW $(MACHADDR+0x04),R2 /* R2 = Mach+4 — presumably m->splpc; confirm Mach layout */
384 /* reset interrupt level */
/* restore an interrupt level without recording the caller pc (body elided) */
390 TEXT splxpc(SB), $-4 /* for iunlock */
/* NOTE(review): TEXT directive (presumably islo) is in elided lines */
401 AND $(PsrDfiq|PsrDirq), R0 /* isolate the interrupt-disable bits */
402 EOR $(PsrDfiq|PsrDirq), R0 /* nonzero iff interrupts are enabled */
/* return the caller's saved pc (body is in elided lines) */
413 TEXT getcallerpc(SB), $-4
/* record the current sp and return pc into the Label pointed to by R0 */
423 TEXT setlabel(SB), $-4
424 MOVW R13, 0(R0) /* sp */
425 MOVW R14, 4(R0) /* pc */
/* resume at the sp/pc recorded in the Label pointed to by R0 */
429 TEXT gotolabel(SB), $-4
430 MOVW 0(R0), R13 /* sp */
431 MOVW 4(R0), R14 /* pc */
436 /* The first MCR instruction of this function needs to be on a cache-line
437 * boundary; to make this happen, it will be copied (in trap.c).
439 * Doze puts the machine into idle mode. Any interrupt will get it out
440 * at the next instruction (the RET, to be precise).
/* NOTE(review): the TEXT directive (presumably doze) and intervening
 * instructions are in elided lines */
451 MCR CpPWR, 0, R0, C(CpTest), C(0x2), 2
453 MCR CpPWR, 0, R0, C(CpTest), C(0x8), 2 /* enter idle (wait-for-interrupt) mode */