3 /* use of SPRG registers in save/restore */
9 /* special instruction definitions */
/*
 * tlb opcodes the assembler doesn't know, encoded by hand:
 * primary opcode 31, extended opcode in bits 21..30.
 * NOTE(review): architecturally tlbia is extended opcode 370
 * (tlbie is 306, tlbsync is 566) -- confirm 307 is what this
 * model's tlb-invalidate-all really wants.
 */
13 #define TLBIA WORD $((31<<26)|(307<<1))
14 #define TLBSYNC WORD $((31<<26)|(566<<1))
16 /* on some models mtmsr doesn't synchronise enough (eg, 603e) */
17 #define MSRSYNC SYNC; ISYNC
/* stack bytes reserved at trap time: a Ureg plus 8 for the link/cause slots below it */
19 #define UREGSPACE (UREGSIZE+8)
/*
 * kernel entry (fragment -- interior lines are missing from this view):
 * set the initial MSR, establish SB for pre-MMU execution, enable the
 * MMU, then point R(MACH) at mach0 and carve the boot stack out of it.
 */
26 * use 0x000 as exception prefix
27 * enable machine check
30 MOVW $(MSR_EE|MSR_IP), R4
37 /* except during trap handling, R0 is zero from now on */
40 /* setup SB for pre mmu */
47 /* running with MMU on!! */
49 /* set R2 to correct value */
52 /* debugger sets R1 to top of usable memory +1 */
58 MOVW $mach0(SB), R(MACH)
/* boot stack grows down from near the top of cpu0's Mach area */
59 ADD $(MACHSIZE-8), R(MACH), R1 /* set stack */
66 RETURN /* not reached */
/* per-processor Mach structures, page-aligned storage */
68 GLOBL mach0(SB), $(MAXMACH*BY2PG)
72 * on return from this function we will be running in virtual mode.
73 * We set up the Block Address Translation (BAT) registers thus:
74 * 1) first 3 BATs are 256M blocks, starting from KZERO->0
75 * 2) remaining BAT maps last 256M directly
78 /* reset all the tlbs */
/*
 * upper BAT word: BEPI=KZERO, BL=0x7ff (a 256MB block), low bit 2 set
 * = valid in supervisor mode; lower BAT word: physical base 0 with
 * the cacheable read/write attributes encoded by PTEVALID|PTEWRITE.
 * The same pair is used for both instruction and data translation.
 */
89 MOVW $(KZERO|(0x7ff<<2)|2), R3
90 MOVW $(PTEVALID|PTEWRITE), R4
91 MOVW R3, SPR(IBATU(0))
92 MOVW R4, SPR(IBATL(0))
93 MOVW R3, SPR(DBATU(0))
94 MOVW R4, SPR(DBATL(0))
96 /* KZERO+256M -> 256M */
99 MOVW R3, SPR(IBATU(1))
100 MOVW R4, SPR(IBATL(1))
101 MOVW R3, SPR(DBATU(1))
102 MOVW R4, SPR(DBATL(1))
104 /* KZERO+512M -> 512M */
107 MOVW R3, SPR(IBATU(2))
108 MOVW R4, SPR(IBATL(2))
109 MOVW R3, SPR(DBATU(2))
110 MOVW R4, SPR(DBATL(2))
112 /* direct map last block, uncached, (?guarded) */
/* 0xf0000000->0xf0000000: PTE1_I inhibits caching, PTE1_G guards against speculative access */
113 MOVW $((0xf<<28)|(0x7ff<<2)|2), R3
114 MOVW $((0xf<<28)|PTE1_I|PTE1_G|PTE1_RW), R4
115 MOVW R3, SPR(DBATU(3))
116 MOVW R4, SPR(DBATL(3))
/* no instruction fetches from I/O space: leave IBAT3 invalid (R0 is zero here) */
119 MOVW R0, SPR(IBATU(3))
120 MOVW R0, SPR(IBATL(3))
/* turn on instruction and data translation; rfi completes the switch atomically */
127 OR $(MSR_IR|MSR_DR), R4
129 RFI /* resume in kernel mode in caller */
/* FP init fragment: enable invalid-operation, overflow and zero-divide exception traps */
135 MOVFL $0xD,FPSCR(6) /* VE, OE, ZE */
/* preload the constant 2^52+2^31 -- presumably the integer<->double conversion bias; consumer not visible here */
142 FMOVD $4503601774854144.0, F27
/* splhi fragment (per the m->splpc comment): remember caller's PC, then clear MSR_EE to mask interrupts */
178 MOVW R31, 4(R(MACH)) /* save PC in m->splpc */
180 RLWNM $0, R3, $~MSR_EE, R4
/* splx/spllo fragment: merge the saved EE bit from R3 back into the MSR image in R4 */
191 MOVW R31, 4(R(MACH)) /* save PC in m->splpc */
193 RLWMI $0, R3, $MSR_EE, R4
/* islo fragment: isolate MSR_EE -- nonzero result means interrupts were enabled */
212 RLWNM $0, R3, $MSR_EE, R3
/* setlabel(Label*) -- body not visible in this view; no stack frame ($-4) */
215 TEXT setlabel(SB), $-4
/* gotolabel(Label*) -- body not visible in this view; no stack frame ($-4) */
222 TEXT gotolabel(SB), $-4
/* enter-user-mode fragment: resume at UTZERO+32 (just past the a.out header mapped into text) with a user MSR (PR set, MMU on, interrupts enabled) */
230 MOVW $(UTZERO+32), R5 /* header appears in text */
231 MOVW $(MSR_EE|MSR_PR|MSR_ME|MSR_IR|MSR_DR|MSR_RI), R4
/*
 * icflush(virtaddr, count): make [virtaddr, virtaddr+count) coherent
 * for instruction fetch.  Fragment: only the line-alignment and
 * line-count arithmetic is visible; the flush loop itself is not.
 */
237 TEXT icflush(SB), $-4 /* icflush(virtaddr, count) */
239 RLWNM $0, R3, $~(CACHELINESZ-1), R5 /* R5 = start rounded down to a cache line */
242 ADD $(CACHELINESZ-1), R4 /* round byte count up... */
243 SRAW $CACHELINELOG, R4 /* ...and convert to a count of cache lines */
/*
 * dcflush(virtaddr, count): flush the data cache over
 * [virtaddr, virtaddr+count).  Fragment: alignment/count setup and one
 * loop instruction are visible; the rest of the loop is not.
 */
251 TEXT dcflush(SB), $-4 /* dcflush(virtaddr, count) */
253 RLWNM $0, R3, $~(CACHELINESZ-1), R5 /* R5 = start rounded down to a cache line */
258 ADD $(CACHELINESZ-1), R4 /* round byte count up... */
259 SRAW $CACHELINELOG, R4 /* ...and convert to a count of cache lines */
274 DCBF (R4) /* fix for 603x bug */
/*
 * cmpswap(long *addr, long old, long new): atomic compare-and-swap.
 * Fragment: the lwarx/stwcx.-style body is not visible here; the dcbf
 * looks like a workaround for a 603-family reservation erratum.
 */
285 TEXT cmpswap(SB),$0 /* int cmpswap(long*, long, long) */
286 MOVW R3, R4 /* addr */
289 DCBF (R4) /* fix for 603x bug? */
/* getdsisr(): read the data-fault status register -- body not visible */
317 TEXT getdsisr(SB), $0
/* tlbflushall(): invalidate the whole TLB -- body not visible */
364 TEXT tlbflushall(SB), $0
/* tlbflush(va): invalidate the TLB entry for one virtual address -- body not visible */
377 TEXT tlbflush(SB), $0
383 MOVW LR, R31 /* for trace back */
387 * traps force memory mapping off.
388 * the following code has been executed at the exception
390 * MOVW R0, SPR(SAVER0)
392 * MOVW R0, SPR(SAVELR)
/*
 * common trap entry.  R0 carries the vector number.  Translation is
 * off here, so every kernel pointer loaded from memory is a KZERO
 * virtual address and must be masked with ~KZERO (PADDR) before use.
 */
395 TEXT trapvec(SB), $-4
398 MOVW R0, SPR(SAVEXX) /* vector */
400 /* did we come from user space */
406 /* switch to kernel stack */
410 RLWNM $0, R2, $~KZERO, R2 /* PADDR(setSB) */
411 MOVW $mach0(SB), R1 /* m-> */
412 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->) */
413 MOVW 8(R1), R1 /* m->proc */
414 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->proc) */
415 MOVW 8(R1), R1 /* m->proc->kstack */
416 RLWNM $0, R1, $~KZERO, R1 /* PADDR(m->proc->kstack) */
/* reserve room for the Ureg at the top of the kernel stack */
417 ADD $(KSTACK-UREGSIZE), R1
425 RLWNM $0, R1, $~KZERO, R1 /* PADDR(R1) */
432 * enter with stack set and mapped.
433 * on return, SB (R2) has been set, and R3 has the Ureg*,
434 * the MMU has been re-enabled, kernel text and PC are in KSEG,
435 * R(MACH) has been set, and R0 contains 0.
438 TEXT saveureg(SB), $-4
/* bulk-save r2..r31 into the Ureg frame on the stack */
442 MOVMW R2, 48(R1) /* r2:r31 */
444 RLWNM $0, R2, $~KZERO, R2 /* PADDR(setSB) */
445 MOVW $mach0(SB), R(MACH)
446 RLWNM $0, R(MACH), $~KZERO, R(MACH) /* PADDR(m->) */
447 MOVW 8(R(MACH)), R(USER) /* m->proc, same offset trapvec uses */
/* go back to the virtual (KZERO) Mach pointer for use once the MMU is on again */
448 MOVW $mach0(SB), R(MACH)
460 MOVW SPR(SAVELR), R6 /* LR */
464 MOVW R0, 16(R1) /* old PC */
466 MOVW R0, 12(R1) /* old status */
468 MOVW R0, 8(R1) /* cause/vector */
469 ADD $8, R1, R3 /* Ureg* */
470 OR $KZERO, R3 /* fix ureg */
471 STWCCC R3, (R1) /* break any pending reservations */
472 MOVW $0, R0 /* compiler/linker expect R0 to be zero */
/* re-enable translation (and FP) and rfi to the trap handler at a KSEG address */
475 OR $(MSR_IR|MSR_DR|MSR_FP|MSR_RI), R5 /* enable MMU */
478 OR $KZERO, R31 /* return PC in KSEG0 */
480 OR $KZERO, R1 /* fix stack pointer */
481 RFI /* returns to trap handler */
484 * restore state from Ureg and return from trap/interrupt
/* fragment: reload r2..r31, prime SRR0/SRR1 so the (unseen) rfi resumes the interrupted context, then drop back to the old stack */
490 MOVMW 48(R1), R2 /* r2:r31 */
504 MOVW R0, SPR(SRR0) /* old PC */
506 MOVW R0, SPR(SRR1) /* old MSR */
508 MOVW 44(R1), R1 /* old SP */
/*
 * fpsave fragment: store F10..F31 into 8-byte slots of the save buffer
 * at R3 (slot i at offset i*8).  The F0..F9 stores and the FPSCR save
 * are outside this view.
 */
523 FMOVD F10, (10*8)(R3)
524 FMOVD F11, (11*8)(R3)
525 FMOVD F12, (12*8)(R3)
526 FMOVD F13, (13*8)(R3)
527 FMOVD F14, (14*8)(R3)
528 FMOVD F15, (15*8)(R3)
529 FMOVD F16, (16*8)(R3)
530 FMOVD F17, (17*8)(R3)
531 FMOVD F18, (18*8)(R3)
532 FMOVD F19, (19*8)(R3)
533 FMOVD F20, (20*8)(R3)
534 FMOVD F21, (21*8)(R3)
535 FMOVD F22, (22*8)(R3)
536 FMOVD F23, (23*8)(R3)
537 FMOVD F24, (24*8)(R3)
538 FMOVD F25, (25*8)(R3)
539 FMOVD F26, (26*8)(R3)
540 FMOVD F27, (27*8)(R3)
541 FMOVD F28, (28*8)(R3)
542 FMOVD F29, (29*8)(R3)
543 FMOVD F30, (30*8)(R3)
544 FMOVD F31, (31*8)(R3)
/*
 * fprestore(buf): reload the floating-point register file from the
 * buffer at R3 (slot i at offset i*8, mirroring fpsave's layout).
 * Fragment: only the F10..F31 loads are visible; the F0..F9 loads and
 * the FPSCR restore are outside this view.
 */
549 TEXT fprestore(SB), $0
562 FMOVD (10*8)(R3), F10
563 FMOVD (11*8)(R3), F11
564 FMOVD (12*8)(R3), F12
565 FMOVD (13*8)(R3), F13
566 FMOVD (14*8)(R3), F14
567 FMOVD (15*8)(R3), F15
568 FMOVD (16*8)(R3), F16
569 FMOVD (17*8)(R3), F17
570 FMOVD (18*8)(R3), F18
571 FMOVD (19*8)(R3), F19
572 FMOVD (20*8)(R3), F20
573 FMOVD (21*8)(R3), F21
574 FMOVD (22*8)(R3), F22
575 FMOVD (23*8)(R3), F23
576 FMOVD (24*8)(R3), F24
577 FMOVD (25*8)(R3), F25
578 FMOVD (26*8)(R3), F26
579 FMOVD (27*8)(R3), F27
580 FMOVD (28*8)(R3), F28
581 FMOVD (29*8)(R3), F29
582 FMOVD (30*8)(R3), F30
583 FMOVD (31*8)(R3), F31