/* name a system register by its op0/op1/CRn/CRm/op2 fields, packed at the
 * bit positions the AArch64 MSR/MRS instruction encoding uses (19/16/12/8/5) */
#define SYSREG(op0,op1,Cn,Cm,op2) SPR(((op0)<<19|(op1)<<16|(Cn)<<12|(Cm)<<8|(op2)<<5))
TEXT _start(SB), 1, $-4 /* kernel entry point; $-4: leaf, no stack frame */
+ MOV R0, R26 /* save */
+
MOV $setSB-KZERO(SB), R28 /* R28 = SB (static base), as a physical address — MMU is still off */
BL svcmode<>(SB) /* mask exceptions / settle the exception level */
MOV $(L1-KZERO), R0 /* presumably physical address of the L1 translation table — TODO confirm */
BL mmu0init(SB) /* build the initial page tables */
- BL cachedwbinv(SB)
- BL l2cacheuwbinv(SB)
SEVL /* set the local event register so the first WFE below falls through */
_startup:
WFE
BL mmuenable<>(SB) /* switch the MMU on */
+ MOV R26, R0 /* restore the value saved into R26 at entry */
MOV $0, R26
ORR $KZERO, R27 /* NOTE(review): R27 looks like a per-CPU pointer; lift it to a kernel virtual address — confirm */
MSR R27, TPIDR_EL1
WFE
RETURN
-TEXT PUTC(SB), 1, $-4
- MOVWU $(0x3F000000+0x215040), R14
- MOVB R0, (R14)
- RETURN
-
TEXT svcmode<>(SB), 1, $-4
MSR $0xF, DAIFSet /* mask all of D, A, I, F */
MRS CurrentEL, R0 /* read the current exception level */
ORR $KZERO, LR /* NOTE(review): elided hunk boundary — these lines appear to belong to the MMU-enable path, not svcmode; context missing */
MOV LR, -16(RSP)!
- BL cachedwbinv(SB)
BL flushlocaltlb(SB)
/* memory attributes */
/* T0SZ */ | (64-EVASHIFT)<<0 )
MOV $TCRINIT, R1
MRS ID_AA64MMFR0_EL1, R2 /* CPU's supported physical address range */
- ANDW $0xF, R2 // IPS
- ADD R2<<32, R1
+ ANDW $0x7, R2 // PARange (ID_AA64MMFR0_EL1[3:0], clamped to the 3-bit IPS field)
+ ADD R2<<32, R1 // IPS = TCR_EL1[34:32]
MSR R1, TCR_EL1
ISB $SY /* make the TCR write visible before continuing MMU setup */
MSR R0, TTBR0_EL1 /* R0 = translation table root — set up in elided context */
DSB $ISH
ISB $SY
-
- B cacheiinv(SB)
+ RETURN
/*
* TLB maintenance operations.
ORR $(3<<32), R0 // type
_vserrpatch:
B _vserrpatch // branch to vtrapX() patched in
+
+/*
+ * peek(src, dst, len): fault-proof byte copy.
+ * src arrives in R0, dst at dst+8(FP), len (32-bit, zero-extended) at len+16(FP).
+ * The copying loads/stores live under the separate symbol _peekinst,
+ * presumably so the trap handler can recognise a fault at that PC and
+ * recover instead of panicking — NOTE(review): confirm the handler
+ * special-cases _peekinst.
+ * Fix: guard len == 0; the post-test SUBS/BNE loop otherwise underflows
+ * the 32-bit count and copies ~4G bytes.
+ */
+TEXT peek(SB), 1, $-4
+ MOV R0, R1 /* R1 = source pointer */
+ MOV dst+8(FP), R2 /* R2 = destination pointer */
+ MOVWU len+16(FP), R0 /* R0 = byte count */
+ CMP $0, R0
+ BEQ _peekdone /* nothing to copy */
+TEXT _peekinst(SB), 1, $-4
+_peekloop:
+ MOVBU (R1)1!, R3 /* load byte, post-increment src */
+ MOVBU R3, (R2)1! /* store byte, post-increment dst */
+ SUBS $1, R0
+ BNE _peekloop
+_peekdone:
+ RETURN /* R0 is 0 here on the success path */