/*
 * NOTE(review): this chunk is a sampled excerpt of a Plan 9 PowerPC kernel
 * entry file (l.s); the stray leading integers on each line appear to be
 * original line numbers from the extraction, and interior lines are missing.
 */
3 /* use of SPRG registers in save/restore */
9 /* special instruction definitions */
/*
 * tlbia/tlbsync presumably have no assembler mnemonics here, so they are
 * emitted as raw instruction words (primary opcode 31, extended opcode<<1).
 * NOTE(review): the PowerPC ISA assigns tlbia extended opcode 370 and
 * tlbsync 566; the 307 below looks suspect — confirm against the ISA/CPU book.
 */
13 #define TLBIA WORD $((31<<26)|(307<<1))
14 #define TLBSYNC WORD $((31<<26)|(566<<1))
16 /* on some models mtmsr doesn't synchronise enough (eg, 603e) */
17 #define MSRSYNC SYNC; ISYNC
/* stack bytes reserved for a trap frame: a Ureg plus 8 bytes of linkage */
19 #define UREGSPACE (UREGSIZE+8)
/*
 * Boot entry fragment (enclosing TEXT directive not visible in this excerpt).
 * Sets up the initial MSR, SB, per-CPU Mach pointer and a boot stack.
 */
26 * use 0x000 as exception prefix
27 * enable machine check
/* initial MSR value: external interrupts on, exception prefix select */
30 MOVW $(MSR_EE|MSR_IP), R4
37 /* except during trap handling, R0 is zero from now on */
40 /* setup SB for pre mmu */
47 /* running with MMU on!! */
49 /* set R2 to correct value */
52 /* debugger sets R1 to top of usable memory +1 */
/* point R(MACH) at this CPU's Mach structure and carve the boot stack from it */
58 MOVW $mach0(SB), R(MACH)
59 ADD $(MACHSIZE-8), R(MACH), R1 /* set stack */
66 RETURN /* not reached */
/* per-CPU Mach areas: one page per processor — presumably; confirm BY2PG use */
68 GLOBL mach0(SB), $(MAXMACH*BY2PG)
/*
 * MMU bring-up fragment (enclosing TEXT directive not visible in this excerpt).
 * Programs the 60x Block Address Translation registers, then uses RFI to
 * jump into translated (virtual) mode.
 */
72 * on return from this function we will be running in virtual mode.
73 * We set up the Block Address Translation (BAT) registers thus:
74 * 1) first 3 BATs are 256M blocks, starting from KZERO->0
75 * 2) remaining BAT maps last 256M directly
78 /* reset all the tlbs */
/*
 * BAT pair 0: KZERO -> physical 0.
 * Upper word = effective block address | BL mask (0x7ff<<2 selects a 256MB
 * block) | Vs; lower word = physical address + protection/cache bits.
 */
89 MOVW $(KZERO|(0x7ff<<2)|2), R3
90 MOVW $(PTEVALID|PTEWRITE), R4
91 MOVW R3, SPR(IBATU(0))
92 MOVW R4, SPR(IBATL(0))
93 MOVW R3, SPR(DBATU(0))
94 MOVW R4, SPR(DBATL(0))
96 /* KZERO+256M -> 256M */
99 MOVW R3, SPR(IBATU(1))
100 MOVW R4, SPR(IBATL(1))
101 MOVW R3, SPR(DBATU(1))
102 MOVW R4, SPR(DBATL(1))
104 /* KZERO+512M -> 512M */
107 MOVW R3, SPR(IBATU(2))
108 MOVW R4, SPR(IBATL(2))
109 MOVW R3, SPR(DBATU(2))
110 MOVW R4, SPR(DBATL(2))
112 /* direct map last block, uncached, (?guarded) */
/* 0xF0000000 -> 0xF0000000: I/O space, cache-inhibited + guarded, data only */
113 MOVW $((0xf<<28)|(0x7ff<<2)|2), R3
114 MOVW $((0xf<<28)|PTE1_I|PTE1_G|PTE1_RW), R4
115 MOVW R3, SPR(DBATU(3))
116 MOVW R4, SPR(DBATL(3))
/* no instruction fetch from the I/O block: invalidate IBAT pair 3 */
119 MOVW R0, SPR(IBATU(3))
120 MOVW R0, SPR(IBATL(3))
/* turn on instruction and data translation in the MSR image used by RFI */
127 OR $(MSR_IR|MSR_DR), R4
129 RFI /* resume in kernel mode in caller */
/*
 * FPU initialisation fragment (enclosing TEXT directive not visible).
 * 0xD written to FPSCR field 6 sets the VE, OE and ZE exception-enable bits.
 */
135 MOVFL $0xD,FPSCR(6) /* VE, OE, ZE */
/*
 * 4503601774854144.0 = 2^52 + 2^31: the classic double-precision bias
 * constant used for float<->integer conversion — presumably; confirm use.
 */
142 FMOVD $4503601774854144.0, F27
/*
 * Interrupt-priority fragments (TEXT directives not visible): these look
 * like pieces of splhi/splx/islo — confirm against the full file.
 */
/* splhi: record caller's PC for profiling, then clear MSR_EE (mask interrupts) */
178 MOVW R31, 4(R(MACH)) /* save PC in m->splpc */
180 RLWNM $0, R3, $~MSR_EE, R4
/* splx: record caller's PC, then restore the saved MSR_EE state from R3 */
191 MOVW R31, 4(R(MACH)) /* save PC in m->splpc */
193 RLWMI $0, R3, $MSR_EE, R4
/* islo: return just the MSR_EE bit — non-zero iff interrupts are enabled */
212 RLWNM $0, R3, $MSR_EE, R3
/* setlabel(Label*): save current SP/PC into a Label (body not in excerpt) */
215 TEXT setlabel(SB), $-4
/* gotolabel(Label*): resume at a previously saved Label (body not in excerpt) */
222 TEXT gotolabel(SB), $-4
/*
 * touser fragment: enter user mode at UTZERO+32 (first instruction after
 * the a.out header) with a user-mode MSR (PR set, translation on).
 */
230 MOVW $(UTZERO+32), R5 /* header appears in text */
231 MOVW $(MSR_EE|MSR_PR|MSR_ME|MSR_IR|MSR_DR|MSR_RI), R4
237 TEXT icflush(SB), $-4 /* icflush(virtaddr, count) */
/* round the start address down to a cache-line boundary */
239 RLWNM $0, R3, $~(CACHELINESZ-1), R5
/* convert byte count to a whole number of cache lines, rounding up */
242 ADD $(CACHELINESZ-1), R4
243 SRAW $CACHELINELOG, R4
251 TEXT dcflush(SB), $-4 /* dcflush(virtaddr, count) */
/* same line-alignment and line-count computation as icflush above */
253 RLWNM $0, R3, $~(CACHELINESZ-1), R5
258 ADD $(CACHELINESZ-1), R4
259 SRAW $CACHELINELOG, R4
/* flush one data cache line; loop body lines are missing from this excerpt */
273 DCBF (R4) /* fix for 603x bug */
/*
 * Atomic primitives (lwarx/stwcx loop bodies are missing from this excerpt).
 * The DCBF before each reservation works around a 603x erratum, per the
 * original comments.
 */
284 TEXT _xinc(SB),$0 /* void _xinc(long *); */
287 DCBF (R4) /* fix for 603x bug */
294 TEXT _xdec(SB),$0 /* long _xdec(long *); */
297 DCBF (R4) /* fix for 603x bug */
304 TEXT cmpswap(SB),$0 /* int cmpswap(long*, long, long) */
/* first argument (the target address) arrives in R3; keep it in R4 */
305 MOVW R3, R4 /* addr */
308 DCBF (R4) /* fix for 603x bug? */
/* read the DSISR (data-fault status) SPR; body not in excerpt */
336 TEXT getdsisr(SB), $0
/* invalidate the entire TLB; body not in excerpt */
383 TEXT tlbflushall(SB), $0
/* invalidate the TLB entry for one virtual address; body not in excerpt */
396 TEXT tlbflush(SB), $0
/* capture the return address so kernel stack traces can find the caller */
402 MOVW LR, R31 /* for trace back */
406 * traps force memory mapping off.
407 * the following code has been executed at the exception
409 * MOVW R0, SPR(SAVER0)
411 * MOVW R0, SPR(SAVELR)
/*
 * Common trap entry: runs with translation off, so every kernel virtual
 * address must be stripped to its physical form (the RLWNM $~KZERO masks).
 */
414 TEXT trapvec(SB), $-4
417 MOVW R0, SPR(SAVEXX) /* vector */
419 /* did we come from user space */
425 /* switch to kernel stack */
429 RLWNM $0, R2, $~KZERO, R2 /* PADDR(setSB) */
/* walk mach0 -> m->proc -> kstack entirely through physical addresses */
430 MOVW $mach0(SB), R1 /* m-> */
431 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->) */
432 MOVW 8(R1), R1 /* m->proc */
433 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->proc) */
434 MOVW 8(R1), R1 /* m->proc->kstack */
435 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->proc->kstack) */
/* leave room for the Ureg at the top of the kernel stack */
436 ADD $(KSTACK-UREGSIZE), R1
444 RLWNM $0, R1, $~KZERO, R1 /* PADDR(R1) */
451 * enter with stack set and mapped.
452 * on return, SB (R2) has been set, and R3 has the Ureg*,
453 * the MMU has been re-enabled, kernel text and PC are in KSEG,
454 * R(MACH) has been set, and R0 contains 0.
457 TEXT saveureg(SB), $-4
/* bulk-store r2..r31 into the Ureg frame on the stack */
461 MOVMW R2, 48(R1) /* r2:r31 */
463 RLWNM $0, R2, $~KZERO, R2 /* PADDR(setSB) */
/* load m->proc into R(USER), using the physical Mach address while MMU is off */
464 MOVW $mach0(SB), R(MACH)
465 RLWNM $0, R(MACH), $~KZERO, R(MACH) /* PADDR(m->) */
466 MOVW 8(R(MACH)), R(USER)
/* restore R(MACH) to the virtual Mach address for use after the RFI below */
467 MOVW $mach0(SB), R(MACH)
/* save the interrupted context's LR/PC/MSR/vector into the Ureg */
479 MOVW SPR(SAVELR), R6 /* LR */
483 MOVW R0, 16(R1) /* old PC */
485 MOVW R0, 12(R1) /* old status */
487 MOVW R0, 8(R1) /* cause/vector */
488 ADD $8, R1, R3 /* Ureg* */
489 OR $KZERO, R3 /* fix ureg */
490 STWCCC R3, (R1) /* break any pending reservations */
491 MOVW $0, R0 /* compiler/linker expect R0 to be zero */
/* build the MSR for the RFI: translation + FPU + recoverable-interrupt on */
494 OR $(MSR_IR|MSR_DR|MSR_FP|MSR_RI), R5 /* enable MMU */
497 OR $KZERO, R31 /* return PC in KSEG0 */
499 OR $KZERO, R1 /* fix stack pointer */
500 RFI /* returns to trap handler */
503 * restore state from Ureg and return from trap/interrupt
/* bulk-load r2..r31 back from the Ureg frame */
509 MOVMW 48(R1), R2 /* r2:r31 */
/* reload SRR0/SRR1 so the final RFI (not in excerpt) resumes the old context */
523 MOVW R0, SPR(SRR0) /* old PC */
525 MOVW R0, SPR(SRR1) /* old MSR */
527 MOVW 44(R1), R1 /* old SP */
/*
 * fpsave fragment: store F10..F31 into the FPsave buffer at R3, 8 bytes per
 * register. The TEXT directive and the F0..F9/FPSCR stores are missing
 * from this excerpt.
 */
542 FMOVD F10, (10*8)(R3)
543 FMOVD F11, (11*8)(R3)
544 FMOVD F12, (12*8)(R3)
545 FMOVD F13, (13*8)(R3)
546 FMOVD F14, (14*8)(R3)
547 FMOVD F15, (15*8)(R3)
548 FMOVD F16, (16*8)(R3)
549 FMOVD F17, (17*8)(R3)
550 FMOVD F18, (18*8)(R3)
551 FMOVD F19, (19*8)(R3)
552 FMOVD F20, (20*8)(R3)
553 FMOVD F21, (21*8)(R3)
554 FMOVD F22, (22*8)(R3)
555 FMOVD F23, (23*8)(R3)
556 FMOVD F24, (24*8)(R3)
557 FMOVD F25, (25*8)(R3)
558 FMOVD F26, (26*8)(R3)
559 FMOVD F27, (27*8)(R3)
560 FMOVD F28, (28*8)(R3)
561 FMOVD F29, (29*8)(R3)
562 FMOVD F30, (30*8)(R3)
563 FMOVD F31, (31*8)(R3)
/*
 * fprestore(FPsave*): reload the floating-point registers saved by fpsave.
 * The FPSCR/F0..F9 reloads and the trailing RETURN are missing from this
 * excerpt; only the F10..F31 loads are visible.
 */
568 TEXT fprestore(SB), $0
581 FMOVD (10*8)(R3), F10
582 FMOVD (11*8)(R3), F11
583 FMOVD (12*8)(R3), F12
584 FMOVD (13*8)(R3), F13
585 FMOVD (14*8)(R3), F14
586 FMOVD (15*8)(R3), F15
587 FMOVD (16*8)(R3), F16
588 FMOVD (17*8)(R3), F17
589 FMOVD (18*8)(R3), F18
590 FMOVD (19*8)(R3), F19
591 FMOVD (20*8)(R3), F20
592 FMOVD (21*8)(R3), F21
593 FMOVD (22*8)(R3), F22
594 FMOVD (23*8)(R3), F23
595 FMOVD (24*8)(R3), F24
596 FMOVD (25*8)(R3), F25
597 FMOVD (26*8)(R3), F26
598 FMOVD (27*8)(R3), F27
599 FMOVD (28*8)(R3), F28
600 FMOVD (29*8)(R3), F29
601 FMOVD (30*8)(R3), F30
602 FMOVD (31*8)(R3), F31