/*
 * Convenience macros for the Plan 9 MIPS assembler.
 * This file is run through cpp, so these are C preprocessor macros.
 */
5 #define NOOP NOR R0, R0, R0	/* no-op, encoded as NOR r0,r0,r0 (this kernel's convention) */
6 #define WAIT NOOP; NOOP	/* two no-ops; presumably a pipeline/hazard delay -- TODO confirm at use sites */
7 #define RETURN RET; NOOP	/* return with a no-op filling the branch delay slot */
8 #define CONST(i, v) MOVW $((i) & 0xffff0000), v; OR $((i) & 0xffff), v;	/* v = 32-bit constant i, loaded in two 16-bit halves */
9 #define GETMACH(r) CONST(MACHADDR, r)	/* r = MACHADDR, the fixed address of this cpu's Mach struct */
/*
 * Instructions the assembler does not know are emitted as raw words.
 * Field layout follows the MIPS32 encodings: rs in bits 25-21,
 * rt in bits 20-16, rd in bits 15-11, sel in bits 2-0.
 */
14 #define ERET WORD $0x42000018	/* exception return: COP0, CO=1, function 030 */
15 #define LL(base, rt) WORD $((060<<26)|((base)<<21)|((rt)<<16))	/* load linked (opcode 060 octal), offset 0 */
16 #define SC(base, rt) WORD $((070<<26)|((base)<<21)|((rt)<<16))	/* store conditional (opcode 070 octal), offset 0 */
18 #define MFC0(src,sel,dst) WORD $(0x40000000|((src)<<11)|((dst)<<16)|(sel))	/* gpr dst = cp0 reg src, select sel */
19 #define MTC0(src,dst,sel) WORD $(0x40800000|((dst)<<11)|((src)<<16)|(sel))	/* cp0 reg dst, select sel = gpr src */
20 #define RDHWR(hwr, r) WORD $(0x7c00003b|((hwr)<<11)|((r)<<16))	/* gpr r = hardware register hwr (SPECIAL3 function 073) */
/*
 * The CACHE instruction is assembled through the BREAK op-code slot.
 * Its operand is built by textually pasting a cache selector (PI/PD/SD,
 * which deliberately open with "R((") onto an operation (IWBI..HSV,
 * which close with ")))"), e.g. "CACHE PD+HWB, 0x00(R8)" below.
 * The unbalanced parentheses in the individual macros are intentional;
 * they only balance once a selector and an operation are combined.
 */
25 #define CACHE BREAK /* overloaded op-code */
27 #define PI R((0 /* primary I cache */
28 #define PD R((1 /* primary D cache */
29 #define SD R((3 /* secondary combined I/D cache */
31 #define IWBI (0<<2))) /* index write-back invalidate */
32 #define ILT (1<<2))) /* index load tag */
33 #define IST (2<<2))) /* index store tag */
34 #define CDE (3<<2))) /* create dirty exclusive */
35 #define HINV (4<<2))) /* hit invalidate */
36 #define HWBI (5<<2))) /* hit write back invalidate */
37 #define HWB (6<<2))) /* hit write back */
38 #define HSV (7<<2))) /* hit set virtual */
/*
 * NOTE(review): fragments of the reset/startup path and of touser();
 * the intervening lines are not visible in this excerpt.
 */
54 MOVW R1, FCR31 /* permit only inexact and underflow */
74 MOVW $MACHADDR, R(MACH)	/* R(MACH) = m, this cpu's Mach struct */
75 ADDU $(MACHSIZE-BY2V), R(MACH), SP	/* boot stack grows down from the end of the Mach area */
92 MOVW R0, 0(R(MACH)) /* m->machno = 0 */
93 MOVW R0, R(USER) /* up = nil */
117 MOVW $setR30(SB), R30	/* establish the static base register for globals */
128 * Take first processor into user mode
129 * - argument is stack pointer to user
134 MOVW $(UTZERO+32), R2 /* header appears in text */
137 AND $(~KMODEMASK), R4	/* clear current mode bits in STATUS */
138 OR $(KUSER|IE|EXL), R4 /* switch to user mode, intrs on, exc */
139 MOVW R4, M(STATUS) /* " */
141 ERET /* clears EXL */
145 * manipulate interrupts
148 /* enable an interrupt; bit is in R1 */
157 /* disable an interrupt; bit is in R1 */
/*
 * The spl routines record their caller's PC (R31) in m->splpc
 * (offset 12 in Mach) so the holder of the priority level can be
 * identified later.
 */
168 MOVW R31, 12(R(MACH)) /* save PC in m->splpc */
177 MOVW R31, 12(R(MACH)) /* save PC in m->splpc */
/*
 * Small leaf routines; bodies are largely elided in this excerpt.
 * $-4 frames take no stack; $0 frames take the minimal frame.
 */
204 TEXT coherence(SB), $-4	/* memory barrier */
211 TEXT setlabel(SB), $-4	/* save context into a Label for longjmp-style switching */
217 TEXT gotolabel(SB), $-4	/* resume a context saved by setlabel */
224 * the tlb routines need to be called at splhi.
235 TEXT getrandom(SB),$0
239 TEXT getpagemask(SB),$0	/* read M(PAGEMASK) -- TODO confirm, body not visible */
243 TEXT setpagemask(SB),$0
245 MOVW R0, R1 /* prevent accidents */
248 TEXT puttlbx(SB), $0 /* puttlbx(index, virt, phys0, phys1, pagemask) */
252 MOVW $((2*BY2PG-1) & ~0x1fff), R5	/* page-pair mask for the default page size */
269 TEXT gettlbx(SB), $0 /* gettlbx(index, &entry) */
287 TEXT gettlbp(SB), $0 /* gettlbp(tlbvirt, &entry) */
313 TEXT gettlbvirt(SB), $0 /* gettlbvirt(index) */
326 * compute stlb hash index.
328 * M(TLBVIRT) [page & asid] in arg, result in arg.
329 * stir in swizzled asid; we get best results with asid in both high & low bits.
331 #define STLBHASH(arg, tmp) \
332 AND $0xFF, arg, tmp; \
333 SRL $(PGSHIFT+1), arg; \
335 SLL $(STLBLOG-8), tmp; \
337 CONST (STLBSIZE-1, tmp); \
340 TEXT stlbhash(SB), $0 /* for mmu.c */
/*
 * utlbmiss: fast TLB refill handler.
 * Runs on the exception path with only the kernel scratch registers
 * R26 (k0) and R27 (k1) available.  Looks the faulting page pair up in
 * the software tlb cache m->stb[] and falls through to stlbm (the slow
 * C path) if the entry is absent or for a different virtual address.
 */
344 TEXT utlbmiss(SB), $-4
346 MOVW R27, 12(R26) /* m->splpc = R27 */
350 MOVW R27,16(R26) /* m->tlbfault++ */
356 /* scale to a byte index (multiply by 12) */
357 SLL $1, R27, R26 /* × 2 */
358 ADDU R26, R27 /* × 3 */
359 SLL $2, R27 /* × 12 */
363 ADDU R26, R27 /* R27 = &m->stb[hash] */
365 MOVW M(BADVADDR), R26	/* faulting virtual address */
369 BNE R26, utlbodd /* odd page? */
/* even page of the pair: need phys0 first, then phys1 */
373 MOVW 4(R27), R26 /* R26 = m->stb[hash].phys0 */
374 BEQ R26, stlbm /* nothing cached? do it the hard way */
376 MOVW R26, M(TLBPHYS0)
377 MOVW 8(R27), R26 /* R26 = m->stb[hash].phys1 */
379 MOVW R26, M(TLBPHYS1) /* branch delay slot */
/* odd page of the pair: check phys1 first, then load phys0 */
382 MOVW 8(R27), R26 /* R26 = m->stb[hash].phys1 */
383 BEQ R26, stlbm /* nothing cached? do it the hard way */
385 MOVW R26, M(TLBPHYS1)
386 MOVW 4(R27), R26 /* R26 = m->stb[hash].phys0 */
387 MOVW R26, M(TLBPHYS0)
392 MOVW 0(R27), R27 /* R27 = m->stb[hash].virt */
393 BEQ R27, stlbm /* nothing cached? do it the hard way */
395 /* is the stlb entry for the right virtual address? */
396 BNE R26, R27, stlbm /* M(TLBVIRT) != m->stb[hash].virt? */
399 /* if an entry exists, overwrite it, else write a random one */
401 MOVW R27, M(PAGEMASK) /* select page size */
407 BGEZ R26, utlbindex /* if tlb entry found, rewrite it */
414 TLBWI /* write indexed tlb entry */
419 MOVW 12(R26), R27 /* R27 = m->splpc */
426 MOVW 12(R26), R27 /* R27 = m->splpc */
/*
 * gevector: common general-exception entry.
 * Decides whether the trap came from user or kernel mode, switches to
 * the process's kernel stack if from user mode, saves state, then
 * dispatches to trap() or syscall().
 */
430 TEXT gevector(SB), $-4
436 MOVW SP, R26 /* delay slot, old SP in R26 */
440 SUBU $UREGSIZE, SP /* delay slot, allocate frame on kernel stack */
442 wasuser: /* get kernel stack for this user process */
444 MOVW 8(SP), SP /* m->proc */
445 MOVW 8(SP), SP /* m->proc->kstack */
446 ADDU $(KSTACK-UREGSIZE), SP	/* top of kstack, minus room for the Ureg */
452 MOVW R26, 0x10(SP) /* delay slot, save old SP */
455 MOVW 8(R(MACH)), R(USER) /* R24 = m->proc */
456 MOVW $setR30(SB), R30	/* re-establish static base for kernel C code */
458 BEQ R26, dosys /* set by saveregs() */
462 MOVW $forkret(SB), R31	/* return from syscall via forkret */
464 MOVW 4(SP), R1 /* delay slot, first arg to trap() */
468 MOVW 4(SP), R1 /* delay slot, first arg to syscall() */
472 TEXT forkret(SB), $-4
473 JAL restregs(SB) /* restores old PC in R26 */
474 MOVW 0x14(SP), R1 /* delay slot, CAUSE */
479 MOVW 0x10(SP), SP /* delay slot */
482 * SP-> 0x00 --- (spill R31)
483 * 0x04 --- (trap()/syscall() arg1)
/*
 * saveregs/restregs: save and restore the Ureg frame.
 * Both test CAUSE for a system call (CSYS) and take a shortcut:
 * syscalls need fewer registers preserved than other exceptions.
 */
497 TEXT saveregs(SB), $-4
502 MOVW R1, 0x04(SP) /* arg to base of regs */
505 MOVW R1, M(STATUS) /* so we can take another trap */
511 AND $(EXCMASK<<2), R1	/* isolate the exception-code field of CAUSE */
512 SUBU $(CSYS<<2), R1, R26
514 BEQ R26, notsaved /* is syscall? */
515 MOVW R27, 0x34(SP) /* delay slot */
555 MOVW R28, 0x30(SP) /* delay slot */
557 TEXT restregs(SB), $-4
558 AND $(EXCMASK<<2), R1	/* isolate the exception-code field of CAUSE */
559 SUBU $(CSYS<<2), R1, R26
561 BEQ R26, notrestored /* is syscall? */
562 MOVW 0x34(SP), R27 /* delay slot */
598 MOVW 0x0C(SP), R26 /* old PC */
605 MOVW 0x9C(SP), R1 /* delay slot */
608 * hardware interrupt vectors
/*
 * These stubs are copied to the architectural vector addresses;
 * each loads a handler pointer from a fixed SPB (system parameter
 * block) slot via R26 -- TODO confirm against the elided bodies.
 */
611 TEXT vector0(SB), $-4
613 CONST (SPBADDR+0x18, R26)	/* R26 = &SPB slot for this vector */
619 TEXT vector180(SB), $-4
621 CONST (SPBADDR+0x14, R26)	/* R26 = &SPB slot for this vector */
632 * floating-point stuff
635 TEXT clrfpintr(SB), $0
654 TEXT savefpregs(SB), $0
658 AND $~(0x3F<<12), R2, R4	/* clear the FP exception cause bits -- TODO confirm field */
684 TEXT restfpregs(SB), $0
690 MOVW fpstat+4(FP), R2
716 TEXT fcr31(SB), $0 /* fp csr */
721 * Emulate 68020 test and set: load linked / store conditional
726 MOVW R1, R2 /* address of key */
737 /* used by the semaphore implementation */
/*
 * cmpswap(addr, old, new): atomic compare-and-swap built from
 * the LL/SC macros defined above.
 */
739 MOVW R1, R2 /* address of key */
740 MOVW old+4(FP), R3 /* old value */
741 MOVW new+8(FP), R4 /* new value */
742 LL(2, 1) /* R1 = (R2) */
747 SC(2, 1) /* (R2) = R1 if (R2) hasn't changed; R1 = success */
758 TEXT icflush(SB), $-4 /* icflush(virtaddr, count) */
764 ADDU R1, R9 /* R9 = last address */
766 AND R1, R8 /* R8 = first address, rounded down */
768 AND $(~0x3f), R9 /* round last address up */
769 SUBU R8, R9 /* R9 = revised count */
770 icflush1: /* primary cache line size is 16 bytes */
/*
 * Each iteration handles 64 bytes = four 16-byte lines:
 * write back the D-cache line, then invalidate the I-cache line,
 * so freshly written instructions become visible to fetch.
 */
771 CACHE PD+HWB, 0x00(R8)
772 CACHE PI+HINV, 0x00(R8)
773 CACHE PD+HWB, 0x10(R8)
774 CACHE PI+HINV, 0x10(R8)
775 CACHE PD+HWB, 0x20(R8)
776 CACHE PI+HINV, 0x20(R8)
777 CACHE PD+HWB, 0x30(R8)
778 CACHE PI+HINV, 0x30(R8)
786 TEXT dcflush(SB), $-4 /* dcflush(virtaddr, count) */
792 ADDU R1, R9 /* R9 = last address */
794 AND R1, R8 /* R8 = first address, rounded down */
796 AND $(~0x3f), R9 /* round last address up */
797 SUBU R8, R9 /* R9 = revised count */
798 dcflush1: /* primary cache line size is 16 bytes */
/* write back four 16-byte D-cache lines (64 bytes) per iteration */
799 CACHE PD+HWB, 0x00(R8)
800 CACHE PD+HWB, 0x10(R8)
801 CACHE PD+HWB, 0x20(R8)
802 CACHE PD+HWB, 0x30(R8)
825 * access to CP0 registers
841 TEXT wrcompare(SB), $0	/* set the CP0 COMPARE timer register */
845 TEXT rdcompare(SB), $0	/* read the CP0 COMPARE timer register */