3 /* use of SPRG registers in save/restore */
10 /* These only exist on the PPC 755: */
17 /* special instruction definitions */
/* BDNZ: decrement CTR, branch if CTR != 0 (BO=16, BI=0) */
18 #define BDNZ BC 16, 0,
/*
 * mtcrf: move-to-condition-register-fields, hand-assembled
 * (opcode 31, extended opcode 144).  The crm argument is now
 * parenthesized so expression arguments expand correctly.
 */
20 #define MTCRF(r, crm) WORD $((31<<26)|((r)<<21)|((crm)<<12)|(144<<1))
22 /* #define TLBIA WORD $((31<<26)|(370<<1)) Not implemented on the 603e */
23 #define TLBSYNC WORD $((31<<26)|(566<<1))
/* 603-specific TLB loads: operand register n is encoded in the rB field */
24 #define TLBLI(n) WORD $((31<<26)|((n)<<11)|(1010<<1))
25 #define TLBLD(n) WORD $((31<<26)|((n)<<11)|(978<<1))
27 /* on some models mtmsr doesn't synchronise enough (eg, 603e) */
/* trap-time stack reservation: a Ureg plus 8 bytes (presumably link+pad — confirm) */
30 #define UREGSPACE (UREGSIZE+8)
37 * use 0x000 as exception prefix
38 * enable machine check
/*
 * NOTE(review): MSR_IP is set here (vectors at 0xFFFnnnnn), which appears to
 * contradict the "0x000 exception prefix" comment above; presumably an elided
 * instruction clears it later -- confirm against the full source.
 */
41 MOVW $(MSR_ME|MSR_EE|MSR_IP), R4
47 /* except during trap handling, R0 is zero from now on */
50 /* setup SB for pre mmu */
55 /* before this we're not running above KZERO */
57 /* after this we are */
/* walk the first 32MB one cache line at a time (loop body elided here) */
60 MOVW $0x2000000, R4 /* size */
61 MOVW $0, R3 /* base address */
62 RLWNM $0, R3, $~(CACHELINESZ-1), R5 /* R5 = base rounded down to a cache line */
67 ADD $(CACHELINESZ-1), R4 /* round size up... */
68 SRAW $CACHELINELOG, R4 /* ...and convert to a cache-line count */
76 /* BAT0, 3 unused, copy of BAT2 */
77 MOVW SPR(IBATL(2)), R3 /* duplicate BAT2 into BAT0 */
78 MOVW R3, SPR(IBATL(0))
79 MOVW SPR(IBATU(2)), R3
80 MOVW R3, SPR(IBATU(0))
81 MOVW SPR(DBATL(2)), R3
82 MOVW R3, SPR(DBATL(0))
83 MOVW SPR(DBATU(2)), R3
84 MOVW R3, SPR(DBATU(0))
86 MOVW SPR(IBATL(2)), R3 /* and duplicate BAT2 into BAT3 */
87 MOVW R3, SPR(IBATL(3))
88 MOVW SPR(IBATU(2)), R3
89 MOVW R3, SPR(IBATU(3))
90 MOVW SPR(DBATL(2)), R3
91 MOVW R3, SPR(DBATL(3))
92 MOVW SPR(DBATU(2)), R3
93 MOVW R3, SPR(DBATU(3))
96 /* running with MMU on!! */
98 /* set R2 to correct value */
102 MOVW $MACHADDR, R(MACH) /* m-> for cpu 0 */
103 ADD $(MACHSIZE-8), R(MACH), R1 /* set stack */
105 MOVW R0, R(USER) /* up-> set to zero */
106 MOVW R0, 0(R(MACH)) /* machno set to zero */
110 RETURN /* not reached */
113 * on return from this function we will be running in virtual mode.
114 * We set up the Block Address Translation (BAT) registers thus:
115 * 1) first 3 BATs are 256M blocks, starting from KZERO->0
116 * 2) remaining BAT maps last 256M directly
118 TEXT mmuinit0(SB), $0
119 /* reset all the tlbs */
132 /* BATs 0 and 1 cover memory from 0x00000000 to 0x20000000 */
134 /* KZERO -> 0, IBAT and DBAT, 256 MB */
135 MOVW $(KZERO|(0x7ff<<2)|2), R3 /* BATU: BEPI=KZERO, BL=0x7ff (256MB), Vs=1 */
136 MOVW $(PTEVALID|PTEWRITE), R4 /* PTEVALID => Cache coherency on */
137 MOVW R3, SPR(IBATU(0))
138 MOVW R4, SPR(IBATL(0))
139 MOVW R3, SPR(DBATU(0))
140 MOVW R4, SPR(DBATL(0))
142 /* KZERO+256M -> 256M, IBAT and DBAT, 256 MB */
145 MOVW R3, SPR(IBATU(1))
146 MOVW R4, SPR(IBATL(1))
147 MOVW R3, SPR(DBATU(1))
148 MOVW R4, SPR(DBATL(1))
150 /* FPGABASE -> FPGABASE, DBAT, 16 MB */
151 MOVW $(FPGABASE|(0x7f<<2)|2), R3 /* BL=0x7f => 16MB block */
152 MOVW $(FPGABASE|PTEWRITE|PTEUNCACHED), R4 /* FPGA memory, don't cache */
153 MOVW R3, SPR(DBATU(2))
154 MOVW R4, SPR(DBATL(2))
/* no instruction access to the FPGA region */
157 MOVW R0, SPR(IBATU(2))
158 MOVW R0, SPR(IBATL(2))
160 /* direct map last block, uncached, (not guarded, doesn't work for BAT), DBAT only */
161 MOVW $(INTMEM|(0x7ff<<2)|2), R3
162 MOVW $(INTMEM|PTEWRITE|PTEUNCACHED), R4 /* Don't set PTEVALID here */
163 MOVW R3, SPR(DBATU(3))
164 MOVW R4, SPR(DBATL(3))
/* no instruction access via BAT3 either */
167 MOVW R0, SPR(IBATU(3))
168 MOVW R0, SPR(IBATL(3))
170 /* BAT 2 covers memory from 0x00000000 to 0x10000000 */
/*
 * NOTE(review): this overwrites the FPGABASE mapping installed in
 * DBAT2/IBAT2 just above; elided lines may make these alternative
 * configurations -- confirm against the full source.
 */
172 /* KZERO -> 0, IBAT2 and DBAT2, 256 MB */
173 MOVW $(KZERO|(0x7ff<<2)|2), R3
174 MOVW $(PTEVALID|PTEWRITE), R4 /* PTEVALID => Cache coherency on */
175 MOVW R3, SPR(DBATU(2))
176 MOVW R4, SPR(DBATL(2))
177 MOVW R3, SPR(IBATU(2))
178 MOVW R4, SPR(IBATL(2))
/* return via rfi so the MSR change and the PC change happen atomically */
184 MOVW R3, SPR(SRR0) /* Stored PC for RFI instruction */
186 OR $(MSR_IR|MSR_DR|MSR_RI|MSR_FP), R4 /* turn translation on in the saved MSR */
188 RFI /* resume in kernel mode in caller */
/* FPU initialisation fragment (enclosing TEXT elided in this view) */
194 MOVFL $0xD, FPSCR(6) /* VE, OE, ZE */
201 FMOVD $4503601774854144.0, F27 /* 2^52 + 2^31: int<->double conversion constant */
/* splhi-style fragment: record caller's PC, then disable interrupts */
237 MOVW R31, 4(R(MACH)) /* save PC in m->splpc */
239 RLWNM $0, R3, $~MSR_EE, R4 /* R4 = MSR copy with EE cleared */
/* splx-style fragment: restore the saved interrupt-enable state */
250 MOVW R31, 4(R(MACH)) /* save PC in m->splpc */
252 RLWMI $0, R3, $MSR_EE, R4 /* insert the EE bit from R3 into the MSR copy */
/* islo-style fragment: isolate MSR_EE as the return value */
271 RLWNM $0, R3, $MSR_EE, R3
/* setlabel(Label*): body elided in this view (presumably saves SP and LR) */
274 TEXT setlabel(SB), $-4
/* gotolabel(Label*): body elided in this view */
281 TEXT gotolabel(SB), $-4
/* fragment of a user-mode entry routine (its TEXT line is elided): */
289 MOVW $(UTZERO+32), R5 /* header appears in text */
290 MOVW $(MSR_EE|MSR_PR|MSR_IR|MSR_DR|MSR_RI), R4 /* user MSR: interrupts, problem state, MMU on */
296 TEXT dczap(SB), $-4 /* dczap(virtaddr, count) */
/* loop body elided: presumably dcbi over each line in [virtaddr, virtaddr+count) */
298 RLWNM $0, R3, $~(CACHELINESZ-1), R5 /* R5 = virtaddr rounded down to a cache line */
303 ADD $(CACHELINESZ-1), R4 /* round count up... */
304 SRAW $CACHELINELOG, R4 /* ...to a whole number of cache lines */
314 TEXT dcflush(SB), $-4 /* dcflush(virtaddr, count) */
/* loop body elided: presumably dcbf/dcbst over each line in the range */
316 RLWNM $0, R3, $~(CACHELINESZ-1), R5 /* R5 = virtaddr rounded down to a cache line */
321 ADD $(CACHELINESZ-1), R4 /* round count up... */
322 SRAW $CACHELINELOG, R4 /* ...to a whole number of cache lines */
331 TEXT icflush(SB), $-4 /* icflush(virtaddr, count) */
333 RLWNM $0, R3, $~(CACHELINESZ-1), R5 /* R5 = virtaddr rounded down to a cache line */
338 ADD $(CACHELINESZ-1), R4 /* round count up... */
339 SRAW $CACHELINELOG, R4 /* ...to a cache-line count */
341 icf0: ICBI (R5) /* invalidate the instruction cache */
/* unrelated fragment (original line 353; its enclosing routine is elided): */
353 DCBF (R4) /* fix for 603x bug */
/* tlbflushall(): body elided in this view */
365 TEXT tlbflushall(SB), $0
/* tlbflush(va): body elided in this view */
378 TEXT tlbflush(SB), $0
/* fragment (original line 387) from a later routine: */
387 MOVW LR, R31 /* for trace back */
390 /* On an imiss, we get here. If we can resolve it, we do.
391 * Otherwise take the real trap. The code at the vector is
392 * MOVW R0, SPR(SAVER0) No point to this, of course
394 * MOVW R0, SPR(SAVELR)
395 * BL imiss(SB) or dmiss, as the case may be
/* accounting: R1 presumably holds PADDR(m) here -- confirm */
401 MOVW 0xc(R1), R3 /* count m->tlbfault */
404 MOVW 0x10(R1), R3 /* count m->imiss */
/* search the primary PTEG for a PTE matching the iCMP pattern */
409 MOVW SPR(HASH1), R1 /* (phys) pointer into the hash table */
410 ADD $BY2PTEG, R1, R2 /* end pointer */
411 MOVW SPR(iCMP), R3 /* pattern to look for */
415 BEQ imiss2 /* found the entry */
417 CMP R1, R2 /* test end of loop */
418 BNE imiss1 /* Loop */
419 /* Failed to find an entry; take the full trap */
421 MTCRF(1, 0x80) /* restore CR0 bits (they're auto saved in SRR1) */
424 /* Found the entry */
425 MOVW 4(R1), R2 /* Phys addr */
/* elided: presumably load the ITLB (tlbli) and return via rfi */
430 /* Restore Registers */
431 MOVW SPR(SRR1), R1 /* Restore the CR0 field of the CR register from SRR1 */
437 /* On a data load or store miss, we get here. If we can resolve it, we do.
438 * Otherwise take the real trap
/* accounting: R1 presumably holds PADDR(m) here -- confirm */
443 MOVW 0xc(R1), R3 /* count m->tlbfault */
446 MOVW 0x14(R1), R3 /* count m->dmiss */
/* search the primary PTEG for a PTE matching the DCMP pattern */
450 MOVW SPR(HASH1), R1 /* (phys) pointer into the hash table */
451 ADD $BY2PTEG, R1, R2 /* end pointer */
452 MOVW SPR(DCMP), R3 /* pattern to look for */
456 BEQ dmiss2 /* found the entry */
458 CMP R1, R2 /* test end of loop */
459 BNE dmiss1 /* Loop */
460 /* Failed to find an entry; take the full trap */
462 MTCRF(1, 0x80) /* restore CR0 bits (they're auto saved in SRR1) */
465 /* Found the entry */
466 MOVW 4(R1), R2 /* Phys addr */
/* elided: presumably load the DTLB (tlbld) and return via rfi */
470 /* Restore Registers */
471 MOVW SPR(SRR1), R1 /* Restore the CR0 field of the CR register from SRR1 */
478 * When a trap sets the TGPR bit (TLB miss traps do this),
479 * registers get remapped: R0-R31 are temporarily inaccessible,
480 * and Temporary Registers TR0-TR3 are mapped onto R0-R3.
481 * While this bit is set, R4-R31 cannot be used.
482 * The code at the vector has executed this code before
484 * MOVW R0, SPR(SAVER0) No point to this, of course
486 * MOVW R0, SPR(SAVELR)
488 * SAVER0 can be reused. We're not interested in the value of TR0
/* leave the temporary-register file so the real GPRs are visible again */
492 RLWNM $0, R1, $~MSR_TGPR, R1 /* Clear the dreaded TGPR bit in the MSR */
496 /* Now the GPRs are what they're supposed to be, save R0 again */
498 /* Fall through to trapvec */
501 * traps force memory mapping off.
502 * the following code has been executed at the exception
504 * MOVW R0, SPR(SAVER0)
506 * MOVW R0, SPR(SAVELR)
510 TEXT trapvec(SB), $-4
513 MOVW R0, SPR(SAVEXX) /* vector */
515 /* did we come from user space */
521 /* switch to kernel stack */
/* translation is off, so compute PADDR(up->kstack) by stripping KZERO by hand */
523 MOVW $MACHPADDR, R1 /* PADDR(m->) */
524 MOVW 8(R1), R1 /* m->proc */
525 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->proc) */
526 MOVW 8(R1), R1 /* m->proc->kstack */
527 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->proc->kstack) */
528 ADD $(KSTACK-UREGSIZE), R1 /* make room on stack */
537 RLWNM $0, R1, $~KZERO, R1 /* set stack pointer */
540 BL saveureg(SB) /* addressed relative to PC */
545 * enter with stack set and mapped.
546 * on return, SB (R2) has been set, and R3 has the Ureg*,
547 * the MMU has been re-enabled, kernel text and PC are in KSEG,
548 * R(MACH) has been set, and R0 contains 0.
551 TEXT saveureg(SB), $-4
555 MOVMW R2, 48(R1) /* save r2 .. r31 in 48(R1) .. 164(R1) */
556 MOVW $MACHPADDR, R(MACH) /* PADDR(m->) */
557 MOVW 8(R(MACH)), R(USER) /* up-> */
558 MOVW $MACHADDR, R(MACH) /* m-> */
569 MOVW SPR(SAVELR), R6 /* LR */
/* R0 is reloaded by elided instructions before each of these stores */
573 MOVW R0, 16(R1) /* old PC */
575 MOVW R0, 12(R1) /* old status */
577 MOVW R0, 8(R1) /* cause/vector */
594 ADD $8, R1, R3 /* Ureg* */
595 OR $KZERO, R3 /* fix ureg */
596 STWCCC R3, (R1) /* break any pending reservations */
597 MOVW $0, R0 /* compiler/linker expect R0 to be zero */
598 MOVW $setSB(SB), R2 /* SB register */
/* re-enable translation and return through rfi to a KSEG0 PC */
601 OR $(MSR_IR|MSR_DR|MSR_FP|MSR_RI), R5 /* enable MMU */
604 OR $KZERO, R31 /* return PC in KSEG0 */
606 OR $KZERO, R1 /* fix stack pointer */
607 RFI /* returns to trap handler */
610 * restore state from Ureg and return from trap/interrupt
/* enclosing TEXT line elided in this view; R0 is reloaded by elided code
 * before each of the stores below */
616 MOVMW 48(R1), R2 /* restore r2 through r31 */
619 MOVW R0, SPR(SAVER0) /* resave saved R0 */
625 MOVW R0, CR /* Condition register*/
630 MOVW R0, SPR(SRR0) /* old PC */
632 MOVW R0, SPR(SRR1) /* old MSR */
634 MOVW 44(R1), R1 /* old SP */
/* one-line SPR accessors; bodies elided (presumably a single mfspr/mtspr
 * plus RETURN each) */
654 TEXT getdsisr(SB), $0
715 /* Power PC 603e specials */
716 TEXT getimiss(SB), $0
728 TEXT getdmiss(SB), $0
744 TEXT gethash1(SB), $0
748 TEXT puthash1(SB), $0
752 TEXT gethash2(SB), $0
756 TEXT puthash2(SB), $0
/* tail of an FP-register save routine (its TEXT line and the F0-F9/FPSCR
 * stores are elided): store F10..F31 at 8-byte slots from R3 */
798 FMOVD F10, (10*8)(R3)
799 FMOVD F11, (11*8)(R3)
800 FMOVD F12, (12*8)(R3)
801 FMOVD F13, (13*8)(R3)
802 FMOVD F14, (14*8)(R3)
803 FMOVD F15, (15*8)(R3)
804 FMOVD F16, (16*8)(R3)
805 FMOVD F17, (17*8)(R3)
806 FMOVD F18, (18*8)(R3)
807 FMOVD F19, (19*8)(R3)
808 FMOVD F20, (20*8)(R3)
809 FMOVD F21, (21*8)(R3)
810 FMOVD F22, (22*8)(R3)
811 FMOVD F23, (23*8)(R3)
812 FMOVD F24, (24*8)(R3)
813 FMOVD F25, (25*8)(R3)
814 FMOVD F26, (26*8)(R3)
815 FMOVD F27, (27*8)(R3)
816 FMOVD F28, (28*8)(R3)
817 FMOVD F29, (29*8)(R3)
818 FMOVD F30, (30*8)(R3)
819 FMOVD F31, (31*8)(R3)
824 TEXT fprestore(SB), $0
/* loads of F0-F9 (and presumably FPSCR) are elided in this view;
 * reload F10..F31 from 8-byte slots at R3 */
837 FMOVD (10*8)(R3), F10
838 FMOVD (11*8)(R3), F11
839 FMOVD (12*8)(R3), F12
840 FMOVD (13*8)(R3), F13
841 FMOVD (14*8)(R3), F14
842 FMOVD (15*8)(R3), F15
843 FMOVD (16*8)(R3), F16
844 FMOVD (17*8)(R3), F17
845 FMOVD (18*8)(R3), F18
846 FMOVD (19*8)(R3), F19
847 FMOVD (20*8)(R3), F20
848 FMOVD (21*8)(R3), F21
849 FMOVD (22*8)(R3), F22
850 FMOVD (23*8)(R3), F23
851 FMOVD (24*8)(R3), F24
852 FMOVD (25*8)(R3), F25
853 FMOVD (26*8)(R3), F26
854 FMOVD (27*8)(R3), F27
855 FMOVD (28*8)(R3), F28
856 FMOVD (29*8)(R3), F29
857 FMOVD (30*8)(R3), F30
858 FMOVD (31*8)(R3), F31
861 TEXT dcacheenb(SB), $0
863 MOVW SPR(HID0), R4 /* Get HID0 and clear unwanted bits */
864 RLWNM $0, R4, $~(HID_DLOCK), R4 /* drop the data-cache-lock bit */
865 MOVW $(HID_DCFI|HID_DCE), R5 /* flash-invalidate + enable bits */
/* NOTE(review): elided lines presumably combine R4 and R5 into R3
 * before the store below -- confirm against the full source */
870 // MOVW R5, SPR(HID0) /* Cache enable and flash invalidate */
871 MOVW R3, SPR(HID0) /* Cache enable */
875 TEXT icacheenb(SB), $0
877 MOVW SPR(HID0), R4 /* Get HID0 and clear unwanted bits */
878 RLWNM $0, R4, $~(HID_ILOCK), R4 /* drop the instruction-cache-lock bit */
879 MOVW $(HID_ICFI|HID_ICE), R5 /* flash-invalidate + enable bits */
/* NOTE(review): elided lines presumably combine R4 and R5 into R3/R5
 * before these stores -- confirm against the full source */
884 MOVW R5, SPR(HID0) /* Cache enable and flash invalidate */
885 MOVW R3, SPR(HID0) /* Cache enable */
907 TEXT dcachedis(SB), $0
/* the L1 disable below is commented out; only the L2 path remains */
909 /* MOVW SPR(HID0), R4
910 RLWNM $0, R4, $~(HID_DCE), R4
911 MOVW R4, SPR(HID0) /* L1 Cache disable */
/* NOTE(review): R4 is presumably loaded from SPR(1017) by an elided line */
914 RLWNM $0, R4, $~(0x80000000), R4 /* clear the enable bit */
915 MOVW R4, SPR(1017) /* L2 Cache disable */
920 TEXT l2disable(SB), $0
/* NOTE(review): the load of SPR(1017) into R4 is elided in this view */
923 RLWNM $0, R4, $~(0x80000000), R4 /* clear the enable bit */
924 MOVW R4, SPR(1017) /* L2 Cache disable */
/* read back all eight BAT pairs in order (the stores between the loads are
 * elided; presumably part of a getbats()-style dump routine -- confirm) */
929 MOVW SPR(DBATU(0)), R4
931 MOVW SPR(DBATL(0)), R4
933 MOVW SPR(IBATU(0)), R4
935 MOVW SPR(IBATL(0)), R4
937 MOVW SPR(DBATU(1)), R4
939 MOVW SPR(DBATL(1)), R4
941 MOVW SPR(IBATU(1)), R4
943 MOVW SPR(IBATL(1)), R4
945 MOVW SPR(DBATU(2)), R4
947 MOVW SPR(DBATL(2)), R4
949 MOVW SPR(IBATU(2)), R4
951 MOVW SPR(IBATL(2)), R4
953 MOVW SPR(DBATU(3)), R4
955 MOVW SPR(DBATL(3)), R4
957 MOVW SPR(IBATU(3)), R4
959 MOVW SPR(IBATL(3)), R4
/* setdbat0: install the DBAT0 upper/lower pair; the loads of R4 from the
 * argument are elided between the stores -- confirm against the full source */
963 TEXT setdbat0(SB), $0
965 MOVW R4, SPR(DBATU(0))
967 MOVW R4, SPR(DBATL(0))
971 TEXT mmudisable(SB), $0
/* return to the caller with translation off, via rfi */
976 MOVW R4, SPR(SRR0) /* Stored PC for RFI instruction */
979 MOVW $(MSR_IR|MSR_DR|MSR_RI|MSR_FP), R5 /* MSR bits to clear (the ANDN is elided) */
983 MOVW SPR(HID0), R4 /* Get HID0 and clear unwanted bits */
984 MOVW $(HID_ICE|HID_DCE), R5 /* cache-enable bits to clear (the ANDN is elided) */
986 MOVW R4, SPR(HID0) /* Cache disable */
987 RFI /* resume caller with MMU off */
/*
 * mul64fract(uvlong *r, uvlong a, uvlong b)
 * Per the comments below: R9/R10 = a0/a1 (low/high words of a),
 * R4/R5 = b0/b1, accumulating c1 (R12, low word) and c2 (R13, high word).
 * Apparently computes the middle 64 bits of the 128-bit product, i.e.
 * (a*b)>>32; lo(a0*b0) is never formed -- only its carry contribution
 * enters via MULHWU.  Argument loads and result stores are elided here.
 */
995 TEXT mul64fract(SB), $0
1001 MULLW R10, R5, R13 /* c2 = lo(a1*b1) */
1003 MULLW R10, R4, R12 /* c1 = lo(a1*b0) */
1004 MULHWU R10, R4, R7 /* hi(a1*b0) */
1005 ADD R7, R13 /* c2 += hi(a1*b0) */
1007 MULLW R9, R5, R6 /* lo(a0*b1) */
1008 MULHWU R9, R5, R7 /* hi(a0*b1) */
1009 ADDC R6, R12 /* c1 += lo(a0*b1) */
1010 ADDE R7, R13 /* c2 += hi(a0*b1) + carry */
1012 MULHWU R9, R4, R7 /* hi(a0*b0) */
1013 ADDC R7, R12 /* c1 += hi(a0*b0) */
1014 ADDE R0, R13 /* c2 += carry */