4 * Entered here from Compaq's bootldr with MMU disabled.
7 MOVW $setR12(SB), R12 /* load the SB */
9 /* SVC mode, interrupts disabled */
10 MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
/* NOTE(review): lines 11-14 not visible here; R1 is presumably moved to CPSR
 * and then reloaded before the control-register write below — confirm. */
15 MCR CpMMU, 0, R1, C(CpControl), C(0x0) /* write cp15 control register from R1 */
18 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0 /* c7 op7: flush both i and d caches */
25 /* drain write buffer */
26 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
28 MOVW $(MACHADDR+4*BY2PG), R13 /* stack */
29 SUB $4, R13 /* link */
32 /* we shouldn't get here */
35 BL _div(SB) /* hack to get _div etc loaded */
/* invalidate the entire TLB (cp15 c8, op 0x7: both i and d) */
38 TEXT mmuinvalidate(SB), $-4
39 MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7)

/* invalidate the TLB entry for the virtual address passed in R0 (c8 op 0x6,1) */
43 TEXT mmuinvalidateaddr(SB), $-4
44 MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x6), 1
47 /* write back and invalidate i and d caches */
48 TEXT cacheflush(SB), $-4
/* NOTE(review): R3 presumably holds the saved PSR (load not visible);
 * IRQs are disabled while flushing so the loop is not perturbed — confirm. */
51 ORR $(PsrDirq), R3, R1
54 /* write back any dirty data */
62 /* drain write buffer and invalidate i cache contents */
63 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
64 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x5), 0 /* c7 op5: invalidate i cache */
76 /* write back d cache */
/* (dcacheflush body; its TEXT line falls in an unshown gap) */
78 /* write back any dirty data */
87 /* drain write buffer */
88 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
91 /* write back a single cache line */
92 TEXT cachewbaddr(SB), $-4
/* R0 = virtual address; c7 op 0xa,1 cleans the single d-cache entry containing it */
94 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
97 /* write back a region of cache lines */
98 TEXT cachewbregion(SB), $-4
/* clean one line per iteration; loop setup/advance lines are not visible here */
105 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
111 /* invalidate the dcache */
112 TEXT dcacheinvalidate(SB), $-4
113 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x6) /* c7 op6: invalidate d cache, no writeback */
116 /* invalidate the icache */
117 TEXT icacheinvalidate(SB), $-4
118 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x9)
121 /* drain write buffer */
122 TEXT wbflush(SB), $-4
/* stall until the write buffer has emptied (c7 op 0xa,4) */
124 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
/*
 * cp15 register accessors.  Plan 9 ABI: result returned in R0,
 * first argument arrives in R0.  The RETs fall in unshown gaps.
 */
128 TEXT getcpuid(SB), $-4
129 MRC CpMMU, 0, R0, C(CpCPUID), C(0x0) /* R0 = CPU ID register */
132 /* return fault status */
/* (getfsr; its TEXT line falls in an unshown gap) */
134 MRC CpMMU, 0, R0, C(CpFSR), C(0x0)
137 /* return mmu control register */
138 TEXT getcontrol(SB), $-4
140 MRC CpMMU, 0, R0, C(CpControl), C(0x0)
143 /* return mmu dac register */
146 MRC CpMMU, 0, R0, C(CpDAC), C(0x0)
149 /* return mmu ttb register */
152 MRC CpMMU, 0, R0, C(CpTTB), C(0x0)
155 /* return fault address */
157 MRC CpMMU, 0, R0, C(CpFAR), C(0x0)
160 /* set the translation table base */
/* (putttb; its TEXT line falls in an unshown gap) R0 = new table base */
162 MCR CpMMU, 0, R0, C(CpTTB), C(0x0)
166 * enable mmu, i and d caches
168 TEXT mmuenable(SB), $-4
169 MRC CpMMU, 0, R0, C(CpControl), C(0x0) /* read-modify-write the control register */
170 ORR $(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
172 MCR CpMMU, 0, R0, C(CpControl), C(0x0)

/* disable mmu, caches and write buffer (note: clears CpCvivec, not CpCsystem) */
179 TEXT mmudisable(SB), $-4
180 MRC CpMMU, 0, R0, C(CpControl), C(0x0)
181 BIC $(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCvivec), R0
182 MCR CpMMU, 0, R0, C(CpControl), C(0x0)
186 * use exception vectors at 0xffff0000
188 TEXT mappedIvecEnable(SB), $-4
189 MRC CpMMU, 0, R0, C(CpControl), C(0x0)
/* (ORR of the vector-relocation bit is in an unshown gap — presumably CpCvivec) */
191 MCR CpMMU, 0, R0, C(CpControl), C(0x0)

193 TEXT mappedIvecDisable(SB), $-4
194 MRC CpMMU, 0, R0, C(CpControl), C(0x0)
/* (BIC of the vector-relocation bit is in an unshown gap) */
196 MCR CpMMU, 0, R0, C(CpControl), C(0x0)

199 /* set the domain access control register (the MCR below targets CpDAC, not CpTTB) */
201 MCR CpMMU, 0, R0, C(CpDAC), C(0x0)
204 /* set address translation pid */
/* (putpid; its TEXT line falls in an unshown gap) R0 = new pid */
206 MCR CpMMU, 0, R0, C(CpPID), C(0x0)
210 * set the stack value for the mode passed in R0
227 * exception vectors, copied by trapinit() to somewhere useful
/*
 * Eight load-to-PC instructions followed by eight handler addresses.
 * Each MOVW 0x18(R15) loads PC from 0x18+8(=0x20) bytes ahead of itself —
 * exactly 8 words, i.e. the matching WORD slot in the table below —
 * so the pair keeps working wherever trapinit() copies it.
 */
230 TEXT vectors(SB), $-4
231 MOVW 0x18(R15), R15 /* reset */
232 MOVW 0x18(R15), R15 /* undefined */
233 MOVW 0x18(R15), R15 /* SWI */
234 MOVW 0x18(R15), R15 /* prefetch abort */
235 MOVW 0x18(R15), R15 /* data abort */
236 MOVW 0x18(R15), R15 /* reserved */
237 MOVW 0x18(R15), R15 /* IRQ */
238 MOVW 0x18(R15), R15 /* FIQ */
241 WORD $_vsvc(SB) /* reset, in svc mode already */
242 WORD $_vund(SB) /* undefined, switch to svc mode */
243 WORD $_vsvc(SB) /* swi, in svc mode already */
244 WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
245 WORD $_vdabt(SB) /* data abort, switch to svc mode */
246 WORD $_vsvc(SB) /* reserved */
247 WORD $_virq(SB) /* IRQ, switch to svc mode */
248 WORD $_vfiq(SB) /* FIQ, switch to svc mode */
/*
 * SWI (system call) entry: already in SVC mode, so build the Ureg
 * directly on the current stack — pc, psr, type, then R0-R14 of the
 * interrupted (user) mode via the .S form of MOVM.
 */
253 TEXT _vsvc(SB), $-4 /* SWI */
254 MOVW.W R14, -4(R13) /* ureg->pc = interupted PC */
255 MOVW SPSR, R14 /* ureg->psr = SPSR */
256 MOVW.W R14, -4(R13) /* ... */
257 MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
258 MOVW.W R14, -4(R13) /* ... */
259 MOVM.DB.W.S [R0-R14], (R13) /* save user level registers, at end r13 points to ureg */
260 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
261 MOVW R13, R0 /* first arg is pointer to ureg */
262 SUB $8, R13 /* space for argument+link */
/* (the BL to the C trap/syscall handler falls in an unshown gap) */
266 ADD $(8+4*15), R13 /* make r13 point to ureg->type */
267 MOVW 8(R13), R14 /* restore link */
268 MOVW 4(R13), R0 /* restore SPSR */
269 MOVW R0, SPSR /* ... */
270 MOVM.DB.S (R13), [R0-R14] /* restore registers */
271 ADD $8, R13 /* pop past ureg->{type+psr} */
272 RFE /* MOVM.IA.S.W (R13), [R15] */
/*
 * Non-SVC exception entries: each parks R0-R4 on its own mode's small
 * stack, puts the trap type in R0, and (in unshown lines) branches to
 * the common _vswitch below.
 */
274 TEXT _vund(SB), $-4 /* undefined */
275 MOVM.IA [R0-R4], (R13) /* free some working space */

279 TEXT _vpabt(SB), $-4 /* prefetch abort */
280 MOVM.IA [R0-R4], (R13) /* free some working space */
281 MOVW $PsrMabt, R0 /* r0 = type */

284 TEXT _vdabt(SB), $-4 /* data abort */
285 MOVM.IA [R0-R4], (R13) /* free some working space */
286 MOVW $(PsrMabt+1), R0 /* r0 = type */

289 TEXT _virq(SB), $-4 /* IRQ */
290 MOVM.IA [R0-R4], (R13) /* free some working space */
291 MOVW $PsrMirq, R0 /* r0 = type */
295 * come here with type in R0 and R13 pointing above saved [r0-r4]
296 * and type in r0. we'll switch to SVC mode and then call trap.
299 MOVW SPSR, R1 /* save SPSR for ureg */
300 MOVW R14, R2 /* save interrupted pc for ureg */
301 MOVW R13, R3 /* save pointer to where the original [R0-R3] are */
303 /* switch to svc mode */
/* (the MOVW CPSR, R14 preceding this ORR falls in an unshown gap) */
306 ORR $(PsrDirq|PsrDfiq|PsrMsvc), R14
309 /* interupted code kernel or user? */
313 /* here for trap from SVC mode */
314 MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
315 MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
316 MOVM.DB.W [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
317 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
318 MOVW R13, R0 /* first arg is pointer to ureg */
319 SUB $8, R13 /* space for argument+link (for debugger) */
320 MOVW $0xdeaddead,R11 /* marker */
/* (BL to trap falls in an unshown gap) */
324 ADD $(8+4*15), R13 /* make r13 point to ureg->type */
325 MOVW 8(R13), R14 /* restore link */
326 MOVW 4(R13), R0 /* restore SPSR */
327 MOVW R0, SPSR /* ... */
328 MOVM.DB (R13), [R0-R14] /* restore registers (no .S: kernel-mode regs) */
329 ADD $8, R13 /* pop past ureg->{type+psr} */
330 RFE /* MOVM.IA.S.W (R13), [R15] */
332 /* here for trap from USER mode */
/* same as the kernel path above, but the .S forms save/restore USER-mode R0-R14 */
334 MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
335 MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
336 MOVM.DB.W.S [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
337 MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
338 MOVW R13, R0 /* first arg is pointer to ureg */
339 SUB $8, R13 /* space for argument+link (for debugger) */
/* (BL to trap falls in an unshown gap) */
343 ADD $(8+4*15), R13 /* make r13 point to ureg->type */
344 MOVW 8(R13), R14 /* restore link */
345 MOVW 4(R13), R0 /* restore SPSR */
346 MOVW R0, SPSR /* ... */
347 MOVM.DB.S (R13), [R0-R14] /* restore registers */
348 ADD $8, R13 /* pop past ureg->{type+psr} */
349 RFE /* MOVM.IA.S.W (R13), [R15] */

351 TEXT _vfiq(SB), $-4 /* FIQ */
352 RFE /* FIQ is special, ignore it for now */
355 * This is the first jump from kernel to user mode.
356 * Fake a return from interrupt.
358 * Enter with R0 containing the user stack pointer.
359 * UTZERO + 0x20 is always the entry point.
363 /* store the user stack pointer into the USR_r13 */
/* push R0, then pop it with the .S form so it lands in USER-mode R13 */
364 MOVM.DB.W [R0], (R13)
365 MOVM.S.IA.W (R13),[R13]
367 /* set up a PSR for user level */
371 /* save the PC on the stack */
372 MOVW $(UTZERO+0x20), R0
375 /* return from interrupt */
376 RFE /* MOVM.IA.S.W (R13), [R15] */
379 * here to jump to a newly forked process
/* identical Ureg pop sequence to the trap-return paths above */
382 ADD $(4*15), R13 /* make r13 point to ureg->type */
383 MOVW 8(R13), R14 /* restore link */
384 MOVW 4(R13), R0 /* restore SPSR */
385 MOVW R0, SPSR /* ... */
386 MOVM.DB.S (R13), [R0-R14] /* restore registers */
387 ADD $8, R13 /* pop past ureg->{type+psr} */
388 RFE /* MOVM.IA.S.W (R13), [R15] */
/* splhi: raise priority (disable IRQs); returns the previous PSR in R0 */
391 /* save caller pc in Mach */
392 MOVW $(MACHADDR+0x04),R2 /* &m->splpc, presumably — field at MACHADDR+4 */
394 /* turn off interrupts */
396 ORR $(PsrDirq), R0, R1
/* spllo: reenable IRQs (clear the disable bit) */
402 BIC $(PsrDirq), R0, R1
/* splx: restore a PSR previously returned by splhi/spllo */
407 /* save caller pc in Mach */
408 MOVW $(MACHADDR+0x04),R2
410 /* reset interrupt level */
416 TEXT splxpc(SB), $-4 /* for iunlock */
443 TEXT getlink(SB), $-4
447 TEXT getcallerpc(SB), $-4
/*
 * setlabel(Label*): record {sp, pc} for a later longjmp-style resume.
 * gotolabel(Label*): resume at a recorded {sp, pc}.
 * (Return-value setup and RETs fall in unshown gaps.)
 */
465 TEXT setlabel(SB), $-4
466 MOVW R13, 0(R0) /* sp */
467 MOVW R14, 4(R0) /* pc */

471 TEXT gotolabel(SB), $-4
472 MOVW 0(R0), R13 /* sp */
473 MOVW 4(R0), R14 /* pc */
477 /* save the state machine in power_state[] for an upcoming suspend
479 TEXT setpowerlabel(SB), $-4
480 MOVW $power_state+0(SB), R0 /* R0 = &power_state[0] */
481 /* svc */ /* power_state[]: what */
/* snapshot the cp15 MMU state (stores into power_state are in unshown gaps) */
501 MRC CpMMU, 0, R3, C(CpDAC), C(0x0)
503 MRC CpMMU, 0, R3, C(CpTTB), C(0x0)
505 MRC CpMMU, 0, R3, C(CpControl), C(0x0)
507 MRC CpMMU, 0, R3, C(CpFSR), C(0x0)
509 MRC CpMMU, 0, R3, C(CpFAR), C(0x0)
511 MRC CpMMU, 0, R3, C(CpPID), C(0x0)
/* per-mode register save: switch R2's mode field and save that mode's regs
 * (the mode constants and MOVMs fall in unshown gaps) */
514 BIC $(PsrMask), R2, R3
523 BIC $(PsrMask), R2, R3
532 BIC $(PsrMask), R2, R3
541 BIC $(PsrMask), R2, R3
550 BIC $(PsrMask), R2, R3
568 /* Entered after a resume from suspend state.
569 * The bootldr jumps here after a processor reset.
571 TEXT power_resume(SB), $-4
572 MOVW $setR12(SB), R12 /* load the SB */
573 /* SVC mode, interrupts disabled */
574 MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
576 /* gotopowerlabel() */
579 MOVW $power_state+0(SB), R0 /* R0 = &power_state[0], saved by setpowerlabel */
580 MOVW 56(R0), R1 /* R1: SPSR, R2: CPSR */
/* start from clean caches/TLB before re-enabling translation */
586 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0 /* flush i+d caches */
592 /* drain write buffer */
593 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
594 MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7) /* flush the TLB */
/* restore the cp15 state snapshotted by setpowerlabel (loads in unshown gaps) */
596 MCR CpMMU, 0, R3, C(CpDAC), C(0x0)
598 MCR CpMMU, 0, R3, C(CpTTB), C(0x0)
600 MCR CpMMU, 0, R3, C(CpFSR), C(0x0)
602 MCR CpMMU, 0, R3, C(CpFAR), C(0x0)
604 MCR CpMMU, 0, R3, C(CpPID), C(0x0)
606 MCR CpMMU, 0, R3, C(CpControl), C(0x0) /* Enable cache */
611 /* flush i&d caches */
612 MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
614 MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7), 0
/* per-mode register restore, mirroring setpowerlabel's save loop */
621 BIC $(PsrMask), R2, R3
629 BIC $(PsrMask), R2, R3
638 BIC $(PsrMask), R2, R3
647 BIC $(PsrMask), R2, R3
656 BIC $(PsrMask), R2, R3
691 TEXT power_down(SB), $-4

/*
 * Put the SA-1100 to sleep.  Loads the peripheral register base
 * pointers, programs wakeup sources and the resume address, then runs
 * the DRAM self-refresh + force-sleep sequence (much of it in the
 * copied power_code below).
 */
693 TEXT sa1100_power_off<>+0(SB),$8
694 MOVW resetregs+0(SB),R7 /* R7 = reset controller regs */
695 MOVW gpioregs+0(SB),R6 /* R6 = GPIO regs */
696 MOVW memconfregs+0(SB),R5 /* R5 = memory configuration regs */
697 MOVW powerregs+0(SB),R3 /* R3 = power manager regs */
700 /* wakeup on power | rtc */
701 MOVW $(PWR_rtc|PWR_gpio0),R2
704 /* clear reset status */
708 MOVW $(PCFR_opde|PCFR_fp|PCFR_fs), R2 /* sleep-mode pin/oscillator config */
713 /* set resume address (pspr)*/
714 MOVW $resumeaddr+0(SB),R1
720 /* disable clock switching */
721 MCR CpPWR, 0, R1, C(CpTest), C(0x2), 2
723 /* adjust mem timing */
724 MOVW memconfregs+0(SB),R5
726 ORR $(MDREFR_k1db2), R2
729 /* set PLL to lower speed w/ delay (ppcr = 0)*/
730 MOVW powerregs+0(SB),R3
740 /* setup registers for suspend procedure:
741 * 1. clear RT in mscx (R1, R7, R8)
742 * 2. clear DRI in mdrefr (R4)
743 * 3. set slfrsh in mdrefr (R6)
744 * 4. clear DE in mdcnfg (R9)
745 * 5. clear dram refresh (R10)
746 * 6. force sleep (R2)
764 ORR $(MDREFR_slfrsh), R2, R6 /* step 3: self-refresh value */
767 BIC $(MDCFNG_de), R9, R9 /* step 4: DRAM disable value */
770 BIC $(MDREFR_slfrsh), R2, R2
771 BIC $(MDREFR_e1pin), R2, R2 /* step 5: refresh-off value */
776 TEXT power_magic(SB), $-4
777 /* power_code gets copied into the area of no-ops below,
778 * at a cache-line boundary (8 instructions)

797 TEXT power_code(SB), $-4
798 /* Follow the procedure; this code gets copied to the no-op
799 * area preceding this code
/* runs from cache while DRAM is in self-refresh; must not touch memory */
816 B slloop /* loop waiting for sleep */

818 /* The first MCR instruction of this function needs to be on a cache-line
819 * boundary; to make this happen, it will be copied to the first cache-line
820 * boundary 8 words from the start of doze.
822 * Doze puts the machine into idle mode. Any interrupt will get it out
823 * at the next instruction (the RET, to be precise).
853 TEXT doze_code(SB), $-4
854 MCR CpPWR, 0, R0, C(CpTest), C(0x2), 2 /* disable clock switching */
856 MCR CpPWR, 0, R0, C(CpTest), C(0x8), 2 /* wait for interrupt (idle) */