/*
 * sheevaplug machine assist
 * arm926ej-s processor at 1.2GHz
 *
 * loader uses R11 as scratch.
 * R9 and R10 are used for `extern register' variables.
 *
 * ARM v7 arch. ref. man. (I know, this is v5) §B1.3.3 says that
 * we don't need barriers around moves to CPSR.  The ARM v6 manual
 * seems to be silent on the subject.
 */

/*
 * MCR and MRC are counter-intuitively named.
 *	MCR	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# arm -> coproc
 *	MRC	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# coproc -> arm
 */
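
/*
 * For example (an illustrative sketch; cpidget and controlget below
 * use exactly these forms): read the main ID register into R0, then
 * write the control register back from R0.
 *	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid	# coproc -> R0
 *	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)		# R0 -> coproc
 */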

/*
 * Entered here from Das U-Boot with MMU disabled.
 * Until the MMU is enabled it is OK to call functions provided
 * they are within ±32MiB relative and do not require any
 * local variables or more than one argument (i.e. there is no stack).
 */
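/*
 * (the ±32MiB limit is BL's 24-bit signed word offset: ±2^23 words
 * = ±32MiB.  a single-argument leaf such as myputc below is fine;
 * anything needing 4(FP) arguments or automatics must wait until
 * R13 is set up.)
 */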
TEXT _start(SB), 1, $-4
	MOVW	$setR12(SB), R12		/* load the SB */

	/* SVC mode, interrupts disabled */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1

	/*
	 * disable the MMU & caches,
	 * switch to system permission & 32-bit addresses.
	 */
	MOVW	$(CpCsystem|CpCd32|CpCi32), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0)

	/* disable the Sheevaplug's L2 cache, invalidate all caches */

	/* flush caches.  926ejs manual says we have to do it iteratively. */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
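	/*
	 * (a note on the loop, elided in this excerpt: the `test, clean
	 * and invalidate' MRC above writes the condition codes via PC
	 * and sets Z once no dirty lines remain, so the original
	 * branches back to the MRC with BNE until the cache is clean.)
	 */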
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */

	/* make the l2 cache pay attention */
	MOVW	$(PHYSIO+0x20100), R1		/* CPUCSREG */
	ORR	$(1<<3), R2			/* cpu->l2cfg |= L2exists */

	/* invalidate l2 cache */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all

	/* disable l2 cache.  do this while l1 caches are off */
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	/* disabling write allocation is probably for cortex-a8 errata 460075 */
	/* l2 off, no wr alloc, no streaming */
	BIC	$(CpTCl2ena | CpTCl2wralloc | CpTCldcstream), R1
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf

	/* flush caches.  926ejs manual says we have to do it iteratively. */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	MOVW	$PADDR(MACHADDR), R4		/* address of Mach */
	ADD	$4, R4				/* bump Mach word address */
	CMP.S	$PADDR(L1+L1X(0)), R4		/* stop at base of the l1 page table */
	/*
	 * set up the MMU page table
	 */

	/* clear all PTEs first, to provide a default */
	MOVW	$PADDR(L1+L1X(0)), R4		/* address of PTE for 0 */
	CMP.S	$PADDR(L1+16*KiB), R4
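	/*
	 * (sketch of the arithmetic: each L1 section PTE is 4 bytes and
	 * maps 1MiB, so 4096 entries cover the 4GiB address space and
	 * the table is 4096*4 = 16KiB, hence the L1+16*KiB bound above.)
	 */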
	/* double map of PHYSDRAM, KZERO to PHYSDRAM for first few MBs */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PHYSDRAM, R3			/* pa */
	MOVW	$PADDR(L1+L1X(PHYSDRAM)), R4	/* address of PTE for PHYSDRAM */

	/*
	 * back up and fill in PTEs for memory at KZERO
	 * there is 1 bank of 512MiB of SDRAM at PHYSDRAM
	 */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PADDR(L1+L1X(KZERO)), R4	/* start with PTE for KZERO */
	MOVW	$512, R5			/* inner loop count */
_ptekrw:					/* set PTEs for 512MiB */
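	/*
	 * (loop body elided in this excerpt: each pass stores R2|pa into
	 * (R4), advances R4 by 4 and the pa by MiB, and decrements R5;
	 * 512 section entries * 1MiB = 512MiB, matching the single
	 * SDRAM bank.)
	 */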
	/*
	 * back up and fill in PTE for MMIO
	 */
	MOVW	$PTEIO, R2			/* PTE bits */
	MOVW	$PADDR(L1+L1X(VIRTIO)), R4	/* start with PTE for VIRTIO */

	/* mmu.c sets up the vectors later */

	/* set up a temporary stack; avoid data & bss segments */
	MOVW	$(PHYSDRAM | (128*1024*1024)), R13
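	/*
	 * (128*1024*1024 = 0x8000000, i.e. 128MiB into DRAM: well clear
	 * of the kernel's text, data and bss, and of the Mach page and
	 * l1 table at the bottom of memory.)
	 */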
	/* set the domain access control */

	/* set the translation table base */

	BL	pidput(SB)			/* paranoia */

	/* the little dance to turn the MMU & caches on */

	/* warp the PC into the virtual map */

	/*
	 * now running at KZERO+something!
	 */
	MOVW	$setR12(SB), R12		/* reload the SB */
	/*
	 * set up temporary stack again, in case we've just switched
	 * to a new register set.
	 */
	MOVW	$(KZERO|(128*1024*1024)), R13

	/* can now execute arbitrary C code */

	/* undo double map of 0, KZERO */
	MOVW	$PADDR(L1+L1X(0)), R4		/* address of PTE for 0 */
	ADD	$4, R4				/* bump PTE address */
	ADD	$MiB, R0			/* bump pa */

	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinvse
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	/* pass Mach to main and set up the stack */
	MOVW	$(MACHADDR), R0			/* Mach */
	ADD	$(MACHSIZE), R13		/* stack pointer */
	SUB	$4, R13				/* space for link register */
	BL	main(SB)			/* void main(Mach*) */
TEXT _reset(SB), 1, $-4
	/* turn the caches off */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R0
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCwb|CpCicache|CpCdcache|CpCalign), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	/* redo double map of 0, KZERO */
	MOVW	$(L1+L1X(0)), R4		/* address of PTE for 0 */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	ORR	R3, R2, R1			/* first identity-map 0 to 0, etc. */
	ADD	$4, R4				/* bump PTE address */
	ADD	$MiB, R3			/* bump pa */

	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinv
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	/* back to 29- or 26-bit addressing, mainly for SB */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCd32|CpCi32), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)

	/* turn the MMU off */
	/* set new reset vector */
	MOVW	$0xe59ff018, R3			/* MOVW 0x18(R15), R15 */

	MOVW	$PHYSBOOTROM, R3
	MOVW	R3, 0x20(R2)			/* where $0xe59ff018 jumps to */
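	/*
	 * (why 0x20: 0xe59ff018 is `MOVW 0x18(R15), R15', i.e.
	 * ldr pc, [pc, #0x18].  R15 reads as the instruction's own
	 * address + 8, so the vector loads its target from offset
	 * 8 + 0x18 = 0x20, the PHYSBOOTROM word stored just above.)
	 */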
	/* ...and jump to it */
	MOVW	R2, R15				/* software reboot */
_limbo:						/* should not get here... */
	B	_limbo				/* ... and can't get out */
	BL	_div(SB)			/* hack to load _div, etc. */

TEXT _r15warp(SB), 1, $-4

/* clobbers R1, R6 */
TEXT myputc(SB), 1, $-4
	BIC.S	$~(1<<5), R1			/* (x->lsr & LSRthre) == 0? */
	MOVW	R3, (R6)			/* print */
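	/*
	 * (a sketch of the polling protocol, assuming a 16550-style
	 * UART: spin while bit 5 of the line status register, LSRthre
	 * `transmitter holding register empty', is clear, then store
	 * the character into the tx register:
	 *	while((uart->lsr & (1<<5)) == 0)
	 *		;
	 *	uart->thr = c;
	 * the BIC.S above isolates bit 5 and sets Z for the spin
	 * branch, which is elided in this excerpt.)
	 */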
TEXT l1cacheson(SB), 1, $-4
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR			/* splhi */

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)

	MOVW	R5, CPSR			/* splx */
TEXT l1cachesoff(SB), 1, $-4
	MOVM.DB.W [R14], (SP)			/* save lr on stack */
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR			/* splhi */

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)

	MOVW	R5, CPSR			/* splx */
	MOVM.IA.W (SP), [R14]			/* restore lr */
/*
 * cache* functions affect only the L1 caches, which are VIVT.
 */
TEXT cachedwb(SB), 1, $-4		/* D writeback */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	BARRIERS				/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwb), CpCACHEtest

	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	MOVW	R3, CPSR			/* splx */
TEXT cachedwbse(SB), 1, $-4		/* D writeback SE */
	MOVW	R0, R2				/* first arg: address */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	BARRIERS				/* force outstanding stores to cache */
	MOVW	4(FP), R1			/* second arg: size */
//	CMP.S	$(4*1024), R1
	BIC	$(CACHELINESZ-1), R2
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwb), CpCACHEse

	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	MOVW	R3, CPSR			/* splx */
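
/*
 * (a hedged C sketch of the `se' (single entry) range ops: write back
 * [addr, addr+size) one cache line at a time, where each iteration
 * stands for an MCR ... CpCACHEse as above and mcr_wbse is a
 * hypothetical name for it; the loop and branch lines are elided in
 * this excerpt.
 *	p = addr & ~(CACHELINESZ-1);		// the BIC above
 *	for(; p < addr + size; p += CACHELINESZ)
 *		mcr_wbse(p);
 * the BIC aligns the start down to a line boundary so the first
 * partial line is covered too.)
 */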
TEXT cachedwbinv(SB), 1, $-4		/* D writeback+invalidate */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	BARRIERS				/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest

	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	MOVW	R3, CPSR			/* splx */
TEXT cachedwbinvse(SB), 1, $-4		/* D writeback+invalidate SE */
	MOVW	R0, R2				/* first arg: address */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	BARRIERS				/* force outstanding stores to cache */
	MOVW	4(FP), R1			/* second arg: size */
//	CMP.S	$(4*1024), R1
	BIC	$(CACHELINESZ-1), R2
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwbi), CpCACHEse

	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	MOVW	R3, CPSR			/* splx */
TEXT cachedinvse(SB), 1, $-4		/* D invalidate SE */
	MOVW	R0, R2				/* first arg: address */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	MOVW	4(FP), R1			/* second arg: size */
//	CMP.S	$(4*1024), R1
	BIC	$(CACHELINESZ-1), R2
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEinvd), CpCACHEse

	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	MOVW	R3, CPSR			/* splx */
TEXT cacheuwbinv(SB), 1, $-4		/* D+I writeback+invalidate */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	BARRIERS				/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
_uwbinv:					/* D writeback+invalidate */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest

	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	MOVW	$0, R0				/* I invalidate */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	MOVW	R3, CPSR			/* splx */
TEXT cacheiinv(SB), 1, $-4		/* I invalidate */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */

TEXT cachedinv(SB), 1, $-4		/* D invalidate */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvd), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
/*
 * these functions assume that the necessary l1 cache operations have been
 * or will be done explicitly by the caller.
 */

/* enable l2 cache in config coproc. reg.  do this while l1 caches are off. */
TEXT l2cachecfgon(SB), 1, $-4
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all

	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	ORR	$(CpTCl2ena | CpTCl2prefdis), R1	/* l2 on, prefetch off */
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf

/* disable l2 cache in config coproc. reg.  do this while l1 caches are off. */
TEXT l2cachecfgoff(SB), 1, $-4
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
TEXT l2cacheuwb(SB), 1, $-4		/* L2 unified writeback */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all

TEXT l2cacheuwbse(SB), 1, $-4		/* L2 unified writeback SE */
	MOVW	R0, R2				/* first arg: address */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	MOVW	4(FP), R1			/* second arg: size */
	BIC	$(CACHELINESZ-1), R2
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva

	MOVW	R3, CPSR			/* splx */
TEXT l2cacheuwbinv(SB), 1, $-4		/* L2 unified writeback+invalidate */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all

	MOVW	R3, CPSR			/* splx */
TEXT l2cacheuwbinvse(SB), 1, $-4	/* L2 unified writeback+invalidate SE */
	MOVW	R0, R2				/* first arg: address */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	MOVW	4(FP), R1			/* second arg: size */
	BIC	$(CACHELINESZ-1), R2
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva

	MOVW	R3, CPSR			/* splx */
TEXT l2cacheuinv(SB), 1, $-4		/* L2 unified invalidate */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all

TEXT l2cacheuinvse(SB), 1, $-4		/* L2 unified invalidate SE */
	MOVW	R0, R2				/* first arg: address */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1

	MOVW	4(FP), R1			/* second arg: size */
	BIC	$(CACHELINESZ-1), R2
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva

	MOVW	R3, CPSR			/* splx */
/*
 * enable mmu, i and d caches, and high vector
 */
TEXT mmuenable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)

TEXT mmudisable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
TEXT mmuinvalidate(SB), 1, $-4		/* invalidate all */
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv

TEXT mmuinvalidateaddr(SB), 1, $-4	/* invalidate single entry */
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse
TEXT cpidget(SB), 1, $-4		/* main ID */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid

TEXT cpctget(SB), 1, $-4		/* cache type */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDct

TEXT controlget(SB), 1, $-4		/* control */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)

TEXT ttbget(SB), 1, $-4			/* translation table base */
	MRC	CpSC, 0, R0, C(CpTTB), C(0)

TEXT ttbput(SB), 1, $-4			/* translation table base */
	MCR	CpSC, 0, R0, C(CpTTB), C(0)

TEXT dacget(SB), 1, $-4			/* domain access control */
	MRC	CpSC, 0, R0, C(CpDAC), C(0)

TEXT dacput(SB), 1, $-4			/* domain access control */
	MCR	CpSC, 0, R0, C(CpDAC), C(0)

TEXT fsrget(SB), 1, $-4			/* fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0)

TEXT farget(SB), 1, $-4			/* fault address */
	MRC	CpSC, 0, R0, C(CpFAR), C(0)

TEXT pidget(SB), 1, $-4			/* address translation pid */
	MRC	CpSC, 0, R0, C(CpPID), C(0)

TEXT pidput(SB), 1, $-4			/* address translation pid */
	MCR	CpSC, 0, R0, C(CpPID), C(0)
TEXT splhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2		/* save caller pc in Mach */

	MOVW	CPSR, R0			/* turn off interrupts */
	ORR	$(PsrDirq), R0, R1
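	/*
	 * (the old CPSR is left in R0 as the return value, so a later
	 * splx(s) can restore exactly the interrupt state splhi saw;
	 * the MOVW R1, CPSR that applies the masked word is elided in
	 * this excerpt.)
	 */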
TEXT spllo(SB), 1, $-4
	BIC	$(PsrDirq), R0, R1

TEXT splx(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2		/* save caller pc in Mach */

	MOVW	R0, R1				/* reset interrupt level */

TEXT splxpc(SB), 1, $-4			/* for iunlock */

TEXT spldone(SB), 1, $0

TEXT islo(SB), 1, $-4

	ORR	$(PsrDfiq|PsrDirq), R0, R1
//TEXT splflo(SB), $-4
//	BIC	$(PsrDfiq), R0, R1

	SWPW	R0, (R1)			/* fix: deprecated in armv7 */
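	/*
	 * (SWPW atomically exchanges R0 with the word at (R1): the
	 * caller of tas gets the lock's previous value back while the
	 * lock is set in the same step.  a hedged C-level usage sketch:
	 *	while(tas(&l->key) != 0)
	 *		;			// spin until we saw it clear
	 * on ARMv7 this would be redone with LDREX/STREX, as the
	 * comment above notes.)
	 */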
//TEXT tas32(SB), 1, $-4
//	MOVW	$0xDEADDEAD, R0
//	EOR	R3, R3				/* R3 = 0 */
//	MOVW	$1, R15				/* abort: lock != 0 && lock != $0xDEADDEAD */

	CLZ(0, 0)				/* 0 is R0 */
TEXT setlabel(SB), 1, $-4
	MOVW	R13, 0(R0)			/* sp */
	MOVW	R14, 4(R0)			/* pc */

TEXT gotolabel(SB), 1, $-4
	MOVW	0(R0), R13			/* sp */
	MOVW	4(R0), R14			/* pc */
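
/*
 * (setlabel/gotolabel are the kernel's setjmp/longjmp: a Label is
 * just the {sp, pc} pair stored above.  setlabel returns 0 to its
 * caller; gotolabel reloads R13/R14 so that control resumes after
 * the matching setlabel, conventionally returning non-zero there.
 * the MOVW $n, R0 return-value lines are elided in this excerpt.)
 */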
TEXT getcallerpc(SB), 1, $-4

TEXT _idlehands(SB), 1, $-4
//	ORR	$PsrDirq, R3, R1		/* splhi */
	BIC	$PsrDirq, R3, R1		/* spllo */

	MOVW	$0, R0				/* wait for interrupt */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
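	/*
	 * (this MCR is the ARM926EJ-S `wait for interrupt' operation,
	 * the ARMv5 ancestor of WFI: the core stops until an interrupt
	 * arrives, which is why _idlehands drops to spllo first so the
	 * wakeup interrupt can actually be taken.)
	 */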
	MOVW	R3, CPSR			/* splx */

TEXT barriers(SB), 1, $-4