#include "mem.h" #include "spim.s" /* * entrypoint. set SB, pass arguments to main(). * PMON's calling convention: * argc R4 * argv R5 * envp R6 * callvec R7 */ TEXT start(SB), $-4 MOVW $setR30(SB), R30 PUTC('9', R1, R2) /* don't enable any interrupts, out of EXL mode */ MOVW $CU1, R1 MOVW R1, M(STATUS) EHB MOVW R0, M(CAUSE) EHB MOVW R0, M(COMPARE) EHB MOVW R0, M(PERFCTL) EHB MOVW R4, _argc(SB) MOVW R5, _argv(SB) MOVW R6, _env(SB) MOVW R7, pmon_callvec(SB) MOVW $(FPOVFL|FPZDIV|FPINVAL|FPFLUSH), R1 MOVW R1, FCR31 // permit only inexact and underflow NOP MOVD $0.5, F26 SUBD F26, F26, F24 ADDD F26, F26, F28 ADDD F28, F28, F30 MOVD F24, F0 MOVD F24, F2 MOVD F24, F4 MOVD F24, F6 MOVD F24, F8 MOVD F24, F10 MOVD F24, F12 MOVD F24, F14 MOVD F24, F16 MOVD F24, F18 MOVD F24, F20 MOVD F24, F22 MOVW $TLBROFF, R1 MOVW R1, M(WIRED) EHB MOVW R0, M(CONTEXT) EHB /* set KSEG0 cachability before trying LL/SC in lock code */ TOKSEG1(7) MOVW M(CONFIG), R1 AND $(~CFG_K0), R1 /* make kseg0 cachable */ OR $(PTECACHABILITY>>3), R1 MOVW R1, M(CONFIG) EHB TOKSEG0(7) MOVW $MACHADDR, R(MACH) ADDU $(MACHSIZE-BY2WD), R(MACH), R29 /* set up stack */ JAL main(SB) /* main() returned */; PUTC('\r', R1, R2); DELAY(R2); PUTC('\n', R1, R2); DELAY(R2); PUTC('m', R1, R2); DELAY(R2); PUTC('a', R1, R2); DELAY(R2); PUTC('i', R1, R2); DELAY(R2); PUTC('n', R1, R2); DELAY(R2); PUTC('(', R1, R2); DELAY(R2); PUTC(')', R1, R2); DELAY(R2); PUTC(' ', R1, R2); DELAY(R2); PUTC('r', R1, R2); DELAY(R2); PUTC('e', R1, R2); DELAY(R2); PUTC('t', R1, R2); DELAY(R2); PUTC('u', R1, R2); DELAY(R2); PUTC('r', R1, R2); DELAY(R2); PUTC('n', R1, R2); DELAY(R2); PUTC('e', R1, R2); DELAY(R2); PUTC('d', R1, R2); DELAY(R2); PUTC('\r', R1, R2); DELAY(R2); PUTC('\n', R1, R2); JMP 0(PC) /* loop */ /* target for JALR in TOKSEG0/1 */ TEXT ret0(SB), $-4 AND $~KSEG1, R22 OR $KSEG0, R22 JMP (R22) TEXT ret1(SB), $-4 OR $KSEG1, R22 JMP (R22) /* print R1 in hex; clobbers R3—8 */ TEXT printhex(SB), $-4 MOVW $32, R5 MOVW $9, R7 prtop: SUB $4, R5 MOVW R1, R6 SRL R5, R6 AND $0xf, R6 SGTU R6, R7, R8 BEQ R8, prdec /* branch if R6 <= 9 */ ADD $('a'-10), R6 JMP prchar prdec: ADD $'0', R6 prchar: PUTC(R6, R3, R4) BNE R5, prtop RET /* * Take first processor into user mode * - argument is stack pointer to user */ TEXT touser(SB), $-4 MOVW R1, R29 MOVW $(UTZERO+32), R2 /* header appears in text */ MOVW R0, M(CAUSE) EHB MOVW M(STATUS), R4 AND $(~KMODEMASK), R4 OR $(KUSER|IE|EXL), R4 /* switch to user mode, intrs on, exc */ MOVW R4, M(STATUS) MOVW R2, M(EPC) ERET /* clears EXL */ TEXT _loop(SB), $-4 MOVW M(STATUS), R1 JAL printhex(SB) JMP 0(PC) /* * manipulate interrupts */ /* enable an interrupt; bit is in R1 */ TEXT intron(SB), $0 MOVW M(STATUS), R2 OR R1, R2 MOVW R2, M(STATUS) EHB RET /* disable an interrupt; bit is in R1 */ TEXT introff(SB), $0 MOVW M(STATUS), R2 XOR $-1, R1 AND R1, R2 MOVW R2, M(STATUS) EHB RET TEXT splhi(SB), $0 MOVW R31, 12(R(MACH)) /* save PC in m->splpc */ MOVW M(STATUS), R1 EHB AND $(~IE), R1, R2 MOVW R2, M(STATUS) EHB RET TEXT splx(SB), $0 MOVW R31, 12(R(MACH)) /* save PC in m->splpc */ MOVW M(STATUS), R2 EHB AND $IE, R1 AND $(~IE), R2 OR R2, R1 MOVW R1, M(STATUS) EHB RET TEXT spllo(SB), $0 MOVW M(STATUS), R1 EHB OR $IE, R1, R2 MOVW R2, M(STATUS) EHB RET TEXT spldone(SB), $0 RET TEXT islo(SB), $0 MOVW M(STATUS), R1 AND $IE, R1 RET TEXT coherence(SB), $-4 SYNC NOP NOP RET /* * process switching */ TEXT setlabel(SB), $-4 MOVW R29, 0(R1) /* sp */ MOVW R31, 4(R1) /* pc */ MOVW R0, R1 RET TEXT gotolabel(SB), $-4 MOVW 0(R1), R29 /* sp */ MOVW 4(R1), R31 /* pc 
TEXT machstatus(SB), $0
	MOVW	M(STATUS), R1
	RET

TEXT getstatus(SB), $0
	MOVW	M(STATUS), R1
	RET

TEXT setstatus(SB), $0
	MOVW	R1, M(STATUS)
	EHB
	RET

TEXT rdcount(SB), $0
	MOVW	M(COUNT), R1
	RET

TEXT wrcount(SB), $0
	MOVW	R1, M(COUNT)
	EHB
	RET

TEXT wrcompare(SB), $0
	MOVW	R1, M(COMPARE)
	EHB
	RET

TEXT rdcompare(SB), $0
	MOVW	M(COMPARE), R1
	RET

TEXT prid(SB), $0
	MOVW	M(PRID), R1
	RET

TEXT getconfig(SB), $0
	MOVW	M(CONFIG), R1
	RET

TEXT getcause(SB), $0
	MOVW	M(CAUSE), R1
	RET

/*
 * the tlb routines need to be called at splhi.
 */

TEXT puttlb(SB), $0		/* puttlb(virt, phys0, phys1) */
	MOVW	R1, M(TLBVIRT)
	EHB
	MOVW	4(FP), R2		/* phys0 */
	MOVW	8(FP), R3		/* phys1 */
	MOVW	R2, M(TLBPHYS0)
	EHB
	MOVW	$PGSZ, R1
	MOVW	R3, M(TLBPHYS1)
	EHB
	MOVW	R1, M(PAGEMASK)
	EHB
	OR	R2, R3, R4
	AND	$PTEVALID, R4
	TLBP				/* tlb probe */
	EHB
	MOVW	M(INDEX), R1
	BGEZ	R1, index		/* if tlb entry found, use it */
	BEQ	R4, dont		/* not valid? cf. kunmap */
	MOVW	M(RANDOM), R1		/* write random tlb entry */
	MOVW	R1, M(INDEX)
	EHB
index:
	TLBWI				/* write indexed tlb entry */
	EHB
dont:
	RET

TEXT getwired(SB), $0
	MOVW	M(WIRED), R1
	RET

TEXT setwired(SB), $0
	MOVW	R1, M(WIRED)
	EHB
	RET

TEXT getrandom(SB), $0
	MOVW	M(RANDOM), R1
	RET

TEXT getpagemask(SB), $0
	MOVW	M(PAGEMASK), R1
	RET

TEXT setpagemask(SB), $0
	MOVW	R1, M(PAGEMASK)
	EHB
	MOVW	R0, R1			/* prevent accidents */
	RET

TEXT puttlbx(SB), $0	/* puttlbx(index, virt, phys0, phys1, pagemask) */
	MOVW	4(FP), R2
	MOVW	8(FP), R3
	MOVW	12(FP), R4
	MOVW	16(FP), R5
	MOVW	R2, M(TLBVIRT)
	EHB
	MOVW	R3, M(TLBPHYS0)
	EHB
	MOVW	R4, M(TLBPHYS1)
	EHB
	MOVW	R5, M(PAGEMASK)
	EHB
	MOVW	R1, M(INDEX)
	EHB
	TLBWI
	EHB
	RET

TEXT tlbvirt(SB), $0
	MOVW	M(TLBVIRT), R1
	RET

TEXT gettlbvirt(SB), $0		/* gettlbvirt(index) */
	MOVW	M(TLBVIRT), R10		/* save our asid */
	MOVW	R1, M(INDEX)
	EHB
	TLBR				/* read indexed tlb entry */
	EHB
	MOVW	M(TLBVIRT), R1
	MOVW	R10, M(TLBVIRT)		/* restore our asid */
	EHB
	RET

TEXT gettlbx(SB), $0		/* gettlbx(index, &entry) */
	MOVW	4(FP), R5
	MOVW	M(TLBVIRT), R10		/* save our asid */
	MOVW	R1, M(INDEX)
	EHB
	TLBR				/* read indexed tlb entry */
	EHB
	MOVW	M(TLBVIRT), R2
	MOVW	M(TLBPHYS0), R3
	MOVW	M(TLBPHYS1), R4
	MOVW	R2, 0(R5)
	MOVW	R3, 4(R5)
	MOVW	R4, 8(R5)
	MOVW	R10, M(TLBVIRT)		/* restore our asid */
	EHB
	RET

TEXT gettlbp(SB), $0		/* gettlbp(tlbvirt, &entry) */
	MOVW	4(FP), R5
	MOVW	M(TLBVIRT), R10		/* save our asid */
	MOVW	R1, M(TLBVIRT)
	EHB
	TLBP				/* probe tlb */
	EHB
	MOVW	M(INDEX), R1
	BLTZ	R1, gettlbp1		/* if no tlb entry found, return */
	TLBR				/* read indexed tlb entry */
	EHB
	MOVW	M(TLBVIRT), R2
	MOVW	M(TLBPHYS0), R3
	MOVW	M(TLBPHYS1), R4
	MOVW	M(PAGEMASK), R6
	MOVW	R2, 0(R5)
	MOVW	R3, 4(R5)
	MOVW	R4, 8(R5)
	MOVW	R6, 12(R5)
gettlbp1:
	MOVW	R10, M(TLBVIRT)		/* restore our asid */
	EHB
	RET
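/*
 * usage sketch (C, illustrative only): callers bracket tlb updates
 * with splhi/splx, per the note above:
 *
 *	int s;
 *
 *	s = splhi();
 *	puttlb(tlbvirt, phys0, phys1);
 *	splx(s);
 */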
/*
 * exceptions.
 * mips promises that there will be no current hazards upon entry
 * to exception handlers.
 */

/* vector at KSEG0+0x80, simple tlb refill */
TEXT vector0(SB), $-4
	MOVW	$utlbmiss(SB), R26
	JMP	(R26)

/*
 * compute stlb hash index.
 * must match index calculation in mmu.c/putstlb()
 *
 * M(TLBVIRT) [page & asid] in arg, result in arg.
 * stir in swizzled asid; we get best results with asid in both high & low bits.
 *
 * page = tlbvirt >> (PGSHIFT+1);	// ignoring even/odd bit
 * R27 = ((tlbvirt<<(STLBLOG-8) ^ (uchar)tlbvirt ^ page ^
 *	((page & (MASK(HIPFNBITS) << STLBLOG)) >> HIPFNBITS)) &
 *	(STLBSIZE-1)) * 12;
 */
#define STLBHASH(arg, tmp, tmp2) \
	MOVW	arg, tmp2; \
	SRL	$(PGSHIFT+1), arg;	/* move low page # bits to low bits */ \
	CONST	((MASK(HIPFNBITS) << STLBLOG), tmp); \
	AND	arg, tmp;		/* extract high page # bits */ \
	SRL	$HIPFNBITS, tmp;	/* position them */ \
	XOR	tmp, arg;		/* include them */ \
	MOVW	tmp2, tmp;		/* asid in low byte */ \
	SLL	$(STLBLOG-8), tmp;	/* move asid to high bits */ \
	XOR	tmp, arg;		/* include asid in high bits too */ \
	AND	$0xff, tmp2, tmp;	/* asid in low byte */ \
	XOR	tmp, arg;		/* include asid in low bits */ \
	CONST	(STLBSIZE-1, tmp); \
	AND	tmp, arg		/* chop to fit */

TEXT stlbhash(SB), $-4
	STLBHASH(R1, R2, R3)
	RET
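/*
 * the same hash in C (a sketch matching the comment and macro above;
 * MASK, PGSHIFT, HIPFNBITS, STLBLOG and STLBSIZE come from the port's
 * headers).  utlbmiss scales the result by 12 afterwards:
 *
 *	ulong
 *	stlbhash(ulong tlbvirt)
 *	{
 *		ulong page;
 *
 *		page = tlbvirt >> (PGSHIFT+1);	// ignore even/odd bit
 *		return (tlbvirt<<(STLBLOG-8) ^ (uchar)tlbvirt ^ page ^
 *		    ((page & (MASK(HIPFNBITS) << STLBLOG)) >> HIPFNBITS)) &
 *		    (STLBSIZE-1);
 *	}
 */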
TEXT utlbmiss(SB), $-4
	/*
	 * don't use R28: by building constants in two halves (CONST)
	 * we avoid it, since it's unsaved so far.  also avoid
	 * R24 (up in kernel) and R25 (m in kernel).
	 */
	/* update statistics */
	CONST	(MACHADDR, R26)		/* R26 = m-> */
	MOVW	16(R26), R27
	ADDU	$1, R27
	MOVW	R27, 16(R26)		/* m->tlbfault++ */

	MOVW	R23, M(LLADDR)		/* park R23 in LLADDR, whose contents don't matter here */
#ifdef KUTLBSTATS
	MOVW	M(STATUS), R23
	AND	$KUSER, R23
	BEQ	R23, kmiss

	MOVW	24(R26), R27
	ADDU	$1, R27
	MOVW	R27, 24(R26)		/* m->utlbfault++ */
	JMP	either
kmiss:
	MOVW	20(R26), R27
	ADDU	$1, R27
	MOVW	R27, 20(R26)		/* m->ktlbfault++ */
either:
#endif

	/* compute stlb index */
	EHB
	MOVW	M(TLBVIRT), R27		/* asid in low byte */
	STLBHASH(R27, R26, R23)
	MOVW	M(LLADDR), R23		/* restore R23 */

	/* scale to a byte index (multiply by 12) */
	SLL	$1, R27, R26		/* × 2 */
	ADDU	R26, R27		/* × 3 */
	SLL	$2, R27			/* × 12 */

	CONST	(MACHADDR, R26)		/* R26 = m-> */
	MOVW	4(R26), R26		/* R26 = m->stb */
	ADDU	R26, R27		/* R27 = &m->stb[hash] */

	MOVW	M(BADVADDR), R26
	AND	$BY2PG, R26
	BNE	R26, utlbodd		/* odd page? */

utlbeven:
	MOVW	4(R27), R26		/* R26 = m->stb[hash].phys0 */
	BEQ	R26, stlbm		/* nothing cached? do it the hard way */
	MOVW	R26, M(TLBPHYS0)
	EHB
	MOVW	8(R27), R26		/* R26 = m->stb[hash].phys1 */
	MOVW	R26, M(TLBPHYS1)
	EHB
	JMP	utlbcom

utlbodd:
	MOVW	8(R27), R26		/* R26 = m->stb[hash].phys1 */
	BEQ	R26, stlbm		/* nothing cached? do it the hard way */
	MOVW	R26, M(TLBPHYS1)
	EHB
	MOVW	4(R27), R26		/* R26 = m->stb[hash].phys0 */
	MOVW	R26, M(TLBPHYS0)
	EHB

utlbcom:
	MOVW	M(TLBVIRT), R26
	MOVW	(R27), R27		/* R27 = m->stb[hash].virt */
	BEQ	R27, stlbm		/* nothing cached? do it the hard way */
	/* is the stlb entry for the right virtual address? */
	BNE	R26, R27, stlbm		/* M(TLBVIRT) != m->stb[hash].virt? */

	/* if an entry exists, overwrite it, else write a random one */
	CONST	(PGSZ, R27)
	MOVW	R27, M(PAGEMASK)	/* select page size */
	EHB
	TLBP				/* probe tlb */
	EHB
	MOVW	M(INDEX), R26
	EHB
	BGEZ	R26, utlindex		/* if tlb entry found, rewrite it */
	TLBWR				/* else write random tlb entry */
	ERET
utlindex:
	TLBWI				/* write indexed tlb entry */
	ERET

	/* not in the stlb either; make trap.c figure it out */
stlbm:
	MOVW	$exception(SB), R26
	JMP	(R26)

/* vector at KSEG1+0x100, cache error */
TEXT vector100(SB), $-4
	MOVW	$exception(SB), R26
	JMP	(R26)

/* vector at KSEG0+0x180, others */
TEXT vector180(SB), $-4
	MOVW	$exception(SB), R26
	JMP	(R26)

TEXT exception(SB), $-4
	MOVW	M(STATUS), R26
	AND	$KUSER, R26, R27
	BEQ	R27, waskernel

wasuser:
	MOVW	R29, R27
	CONST	(MACHADDR, R29)		/* m-> */
	MOVW	8(R29), R29		/* m->proc */
	MOVW	8(R29), R29		/* m->proc->kstack */
	MOVW	M(STATUS), R26		/* redundant load */
	ADDU	$(KSTACK-UREGSIZE), R29
	MOVW	R31, Ureg_r31(R29)

	JAL	savereg1(SB)

	MOVW	R30, Ureg_r30(R29)
	MOVW	R(MACH), Ureg_r25(R29)
	MOVW	R(USER), Ureg_r24(R29)

	MOVW	$setR30(SB), R30
	CONST	(MACHADDR, R(MACH))	/* R(MACH) = m-> */
	MOVW	8(R(MACH)), R(USER)	/* up = m->proc */

	AND	$(EXCMASK<<2), R26, R1	/* R26 = M(CAUSE) from savereg1 */
	SUBU	$(CSYS<<2), R1
	BNE	R1, notsys

	MOVW	R29, R1			/* first arg for syscall */
	SUBU	$Notuoffset, R29
	JAL	syscall(SB)

sysrestore:
	ADDU	$Notuoffset, R29
	JAL	restreg1(SB)

	MOVW	Ureg_r31(R29), R31
	MOVW	Ureg_status(R29), R26
	MOVW	Ureg_r30(R29), R30
	MOVW	R26, M(STATUS)
	EHB
	MOVW	Ureg_pc(R29), R26	/* old pc */
	MOVW	Ureg_sp(R29), R29
	MOVW	R26, M(EPC)
	ERET

notsys:
	JAL	savereg2(SB)

	MOVW	R29, R1			/* first arg for trap */
	SUBU	$Notuoffset, R29
	JAL	trap(SB)
	ADDU	$Notuoffset, R29

restore:
	JAL	restreg1(SB)
	JAL	restreg2(SB)		/* restores R28, among others; R26 = old pc */

	MOVW	Ureg_r30(R29), R30
	MOVW	Ureg_r31(R29), R31
	MOVW	Ureg_r25(R29), R(MACH)
	MOVW	Ureg_r24(R29), R(USER)
	MOVW	Ureg_sp(R29), R29
	MOVW	R26, M(EPC)
	ERET

waskernel:
	MOVW	R29, R27
	SUBU	$UREGSIZE, R29
	OR	$7, R29			/* conservative rounding */
	XOR	$7, R29
	MOVW	R31, Ureg_r31(R29)

	JAL	savereg1(SB)
	JAL	savereg2(SB)

	MOVW	R29, R1			/* first arg for trap */
	SUBU	$Notuoffset, R29
	JAL	trap(SB)
	ADDU	$Notuoffset, R29

	JAL	restreg1(SB)
	JAL	restreg2(SB)		/* restores R28, among others; R26 = old pc */

	MOVW	Ureg_r31(R29), R31
	MOVW	Ureg_sp(R29), R29
	MOVW	R26, M(EPC)
	ERET

TEXT forkret(SB), $0
	MOVW	R0, R1
	JMP	sysrestore
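/*
 * C-level view of the paths above (a sketch; prototypes as the JALs
 * assume):
 *
 *	void trap(Ureg*);	faults and interrupts
 *	void syscall(Ureg*);	CSYS traps from user mode
 *
 * exception() builds a Ureg on the kernel stack, passes its address
 * in R1 (the first-argument register), and drops SP by Notuoffset
 * around the call.
 */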
/*
 * save mandatory registers.
 * called with old M(STATUS) in R26 and old SP in R27.
 * returns with M(CAUSE) in R26.
 */
TEXT savereg1(SB), $-4
	MOVW	R1, Ureg_r1(R29)

	MOVW	$(~KMODEMASK), R1	/* don't use R28, it's unsaved so far */
	AND	R26, R1
	MOVW	R1, M(STATUS)		/* kernel mode, no interrupts */
	EHB

	MOVW	R26, Ureg_status(R29)	/* status */
	MOVW	R27, Ureg_sp(R29)	/* user SP */

	MOVW	M(EPC), R1
	MOVW	M(CAUSE), R26

	MOVW	R23, Ureg_r23(R29)
	MOVW	R22, Ureg_r22(R29)
	MOVW	R21, Ureg_r21(R29)
	MOVW	R20, Ureg_r20(R29)
	MOVW	R19, Ureg_r19(R29)
	MOVW	R1, Ureg_pc(R29)
	RET

/*
 * save all other registers.
 * called with M(CAUSE) in R26.
 */
TEXT savereg2(SB), $-4
	MOVW	R2, Ureg_r2(R29)

	MOVW	M(BADVADDR), R2
	MOVW	R26, Ureg_cause(R29)
	MOVW	M(TLBVIRT), R1
	MOVW	R2, Ureg_badvaddr(R29)
	MOVW	R1, Ureg_tlbvirt(R29)
	MOVW	HI, R1
	MOVW	LO, R2
	MOVW	R1, Ureg_hi(R29)
	MOVW	R2, Ureg_lo(R29)
					/* LINK,SB,SP missing */
	MOVW	R28, Ureg_r28(R29)
					/* R27, R26 not saved */
					/* R25, R24 missing */
					/* R23-R19 saved in savereg1 */
	MOVW	R18, Ureg_r18(R29)
	MOVW	R17, Ureg_r17(R29)
	MOVW	R16, Ureg_r16(R29)
	MOVW	R15, Ureg_r15(R29)
	MOVW	R14, Ureg_r14(R29)
	MOVW	R13, Ureg_r13(R29)
	MOVW	R12, Ureg_r12(R29)
	MOVW	R11, Ureg_r11(R29)
	MOVW	R10, Ureg_r10(R29)
	MOVW	R9, Ureg_r9(R29)
	MOVW	R8, Ureg_r8(R29)
	MOVW	R7, Ureg_r7(R29)
	MOVW	R6, Ureg_r6(R29)
	MOVW	R5, Ureg_r5(R29)
	MOVW	R4, Ureg_r4(R29)
	MOVW	R3, Ureg_r3(R29)
	RET

/* restore R23-R19 */
TEXT restreg1(SB), $-4
	MOVW	Ureg_r23(R29), R23
	MOVW	Ureg_r22(R29), R22
	MOVW	Ureg_r21(R29), R21
	MOVW	Ureg_r20(R29), R20
	MOVW	Ureg_r19(R29), R19
	RET

/*
 * restore all other registers.
 * returns with pc in R26.
 */
TEXT restreg2(SB), $-4
					/* LINK,SB,SP missing */
	MOVW	Ureg_r28(R29), R28
					/* R27, R26 not saved */
					/* R25, R24 missing */
					/* R23-R19 restored in restreg1 */
	MOVW	Ureg_r18(R29), R18
	MOVW	Ureg_r17(R29), R17
	MOVW	Ureg_r16(R29), R16
	MOVW	Ureg_r15(R29), R15
	MOVW	Ureg_r14(R29), R14
	MOVW	Ureg_r13(R29), R13
	MOVW	Ureg_r12(R29), R12
	MOVW	Ureg_r11(R29), R11
	MOVW	Ureg_r10(R29), R10
	MOVW	Ureg_r9(R29), R9
	MOVW	Ureg_r8(R29), R8
	MOVW	Ureg_r7(R29), R7
	MOVW	Ureg_r6(R29), R6
	MOVW	Ureg_r5(R29), R5
	MOVW	Ureg_r4(R29), R4
	MOVW	Ureg_r3(R29), R3

	MOVW	Ureg_lo(R29), R2
	MOVW	Ureg_hi(R29), R1
	MOVW	R2, LO
	MOVW	R1, HI

	MOVW	Ureg_status(R29), R1
	MOVW	Ureg_r2(R29), R2
	MOVW	R1, M(STATUS)		/* could change interruptibility */
	EHB
	MOVW	Ureg_r1(R29), R1	/* BOTCH */
	MOVW	Ureg_pc(R29), R26
	RET

/*
 * floating point stuff
 */

TEXT savefpregs(SB), $0
	MOVW	FCR31, R2
	MOVW	M(STATUS), R3
	AND	$~FPEXCMASK, R2, R4
	MOVW	R4, FCR31

	MOVD	F0, (0*8)(R1)
	MOVD	F2, (1*8)(R1)
	MOVD	F4, (2*8)(R1)
	MOVD	F6, (3*8)(R1)
	MOVD	F8, (4*8)(R1)
	MOVD	F10, (5*8)(R1)
	MOVD	F12, (6*8)(R1)
	MOVD	F14, (7*8)(R1)
	MOVD	F16, (8*8)(R1)
	MOVD	F18, (9*8)(R1)
	MOVD	F20, (10*8)(R1)
	MOVD	F22, (11*8)(R1)
	MOVD	F24, (12*8)(R1)
	MOVD	F26, (13*8)(R1)
	MOVD	F28, (14*8)(R1)
	MOVD	F30, (15*8)(R1)

	MOVW	R2, (16*8)(R1)		/* FCR31 */
	AND	$~CU1, R3
	MOVW	R3, M(STATUS)
	EHB
	RET

TEXT restfpregs(SB), $0		/* restfpregs(fpsave, fpstatus) */
	MOVW	M(STATUS), R3
	OR	$CU1, R3
	MOVW	R3, M(STATUS)
	EHB
	MOVW	4(FP), R2
	MOVW	R2, FCR31
	NOP

	MOVD	(0*8)(R1), F0
	MOVD	(1*8)(R1), F2
	MOVD	(2*8)(R1), F4
	MOVD	(3*8)(R1), F6
	MOVD	(4*8)(R1), F8
	MOVD	(5*8)(R1), F10
	MOVD	(6*8)(R1), F12
	MOVD	(7*8)(R1), F14
	MOVD	(8*8)(R1), F16
	MOVD	(9*8)(R1), F18
	MOVD	(10*8)(R1), F20
	MOVD	(11*8)(R1), F22
	MOVD	(12*8)(R1), F24
	MOVD	(13*8)(R1), F26
	MOVD	(14*8)(R1), F28
	MOVD	(15*8)(R1), F30

	AND	$~CU1, R3
	MOVW	R3, M(STATUS)
	EHB
	RET

TEXT fcr31(SB), $0
	MOVW	FCR31, R1
	MOVW	M(STATUS), R3
	AND	$~CU1, R3
	MOVW	R3, M(STATUS)
	EHB
	RET

TEXT clrfpintr(SB), $0
	MOVW	M(STATUS), R3
	OR	$CU1, R3
	MOVW	R3, M(STATUS)
	EHB

	MOVW	FCR31, R1
	AND	$~FPEXCMASK, R1, R2
	MOVW	R2, FCR31

	AND	$~CU1, R3
	MOVW	R3, M(STATUS)
	EHB
	RET

/*
 * Emulate 68020 test and set: load linked / store conditional
 */

TEXT tas(SB), $0
	MOVW	R1, R2			/* address of key */
tas1:
	MOVW	$1, R3
	LL(2, 1)
	NOP
	SC(2, 3)
	NOP
	BEQ	R3, tas1
	RET

TEXT ainc(SB), $0
	MOVW	R1, R2			/* address of counter */
loop:
	MOVW	$1, R3
	LL(2, 1)
	NOP
	ADDU	R1, R3
	MOVW	R3, R1			/* return new value */
	SC(2, 3)
	NOP
	BEQ	R3, loop
	RET

TEXT adec(SB), $0
	SYNC
	NOP
	NOP
	MOVW	R1, R2			/* address of counter */
loop1:
	MOVW	$-1, R3
	LL(2, 1)
	NOP
	ADDU	R1, R3
	MOVW	R3, R1			/* return new value */
	SC(2, 3)
	NOP
	BEQ	R3, loop1
	RET

TEXT cmpswap(SB), $0		/* cmpswap(addr, old, new) */
	MOVW	R1, R2			/* address of key */
	MOVW	old+4(FP), R3		/* old value */
	MOVW	new+8(FP), R4		/* new value */
	LL(2, 1)			/* R1 = (R2) */
	NOP
	BNE	R1, R3, fail
	MOVW	R4, R1
	SC(2, 1)	/* (R2) = R1 if (R2) hasn't changed; R1 = success */
	NOP
	RET
fail:
	MOVW	R0, R1
	RET
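/*
 * C-level semantics of the LL/SC loops above (a sketch; each body
 * executes atomically, retrying until the store-conditional wins):
 *
 *	int  tas(ulong *key)	{ int old = *key; *key = 1; return old; }
 *	long ainc(long *p)	{ return ++*p; }
 *	long adec(long *p)	{ return --*p; }
 *	int  cmpswap(long *p, long old, long new)
 *		{ if(*p != old) return 0; *p = new; return 1; }
 */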
/*
 * cache manipulation
 */

/*
 * we avoided using R4, R5, R6, and R7 so gotopc could call us
 * without saving them, but gotopc is now gone.
 */
TEXT icflush(SB), $-4		/* icflush(virtaddr, count) */
	MOVW	M(STATUS), R10		/* old status -> R10 */
	MOVW	4(FP), R9
	MOVW	R0, M(STATUS)		/* intrs off */
	EHB
	TOKSEG1(11)			/* return to kseg1 (uncached) */
	ADDU	R1, R9			/* R9 = last address */
	MOVW	$(~(CACHELINESZ-1)), R8
	AND	R1, R8			/* R8 = first address, rounded down */
	ADDU	$(CACHELINESZ-1), R9
	AND	$(~(CACHELINESZ-1)), R9	/* round last address up */
	SUBU	R8, R9			/* R9 = revised count */
icflush1:
//	CACHE	PD+HWB, (R8)		/* flush D to ram */
//	CACHE	PI+HINV, (R8)		/* invalidate in I */
	CACHE	SD+HWBI, (R8)		/* flush & invalidate thru L2 */
	SUBU	$CACHELINESZ, R9
	ADDU	$CACHELINESZ, R8
	BGTZ	R9, icflush1
	TOKSEG0(11)			/* return to kseg0 (cached) */
	MOVW	R10, M(STATUS)
	EHB
	RET

TEXT dcflush(SB), $-4		/* dcflush(virtaddr, count) */
	MOVW	M(STATUS), R10		/* old status -> R10 */
	MOVW	4(FP), R9
	MOVW	R0, M(STATUS)		/* intrs off */
	EHB
	SYNC
	TOKSEG1(11)			/* return to kseg1 (uncached) */
	ADDU	R1, R9			/* R9 = last address */
	MOVW	$(~(CACHELINESZ-1)), R8
	AND	R1, R8			/* R8 = first address, rounded down */
	ADDU	$(CACHELINESZ-1), R9
	AND	$(~(CACHELINESZ-1)), R9	/* round last address up */
	SUBU	R8, R9			/* R9 = revised count */
dcflush1:
//	CACHE	PI+HINV, (R8)		/* invalidate in I */
//	CACHE	PD+HWBI, (R8)		/* flush & invalidate in D */
	CACHE	SD+HWBI, (R8)		/* flush & invalidate thru L2 */
	SUBU	$CACHELINESZ, R9
	ADDU	$CACHELINESZ, R8
	BGTZ	R9, dcflush1
	SYNC
	TOKSEG0(11)			/* return to kseg0 (cached) */
	MOVW	R10, M(STATUS)
	EHB
	RET

TEXT dcinvalid(SB), $-4		/* dcinvalid(virtaddr, count) */
	MOVW	M(STATUS), R10		/* old status -> R10 */
	MOVW	4(FP), R9
	MOVW	R0, M(STATUS)		/* intrs off */
	EHB
	SYNC
	TOKSEG1(11)			/* return to kseg1 (uncached) */
	ADDU	R1, R9			/* R9 = last address */
	MOVW	$(~(CACHELINESZ-1)), R8
	AND	R1, R8			/* R8 = first address, rounded down */
	ADDU	$(CACHELINESZ-1), R9
	AND	$(~(CACHELINESZ-1)), R9	/* round last address up */
	SUBU	R8, R9			/* R9 = revised count */
dcinvalid1:
//	CACHE	PD+HINV, (R8)		/* invalidate in D */
	CACHE	SD+HINV, (R8)		/* invalidate thru L2 */
	SUBU	$CACHELINESZ, R9
	ADDU	$CACHELINESZ, R8
	BGTZ	R9, dcinvalid1
	SYNC
	TOKSEG0(11)			/* return to kseg0 (cached) */
	MOVW	R10, M(STATUS)
	EHB
	RET

TEXT cleancache(SB), $-4
	MOVW	M(STATUS), R10		/* old status -> R10 */
	MOVW	R0, M(STATUS)		/* intrs off */
	EHB
	TOKSEG1(11)			/* return to kseg1 (uncached) */
	MOVW	$KSEG0, R1		/* index, not address, kseg0 avoids tlb */
	MOVW	$(SCACHESIZE/4), R9	/* 4-way cache */
ccache:
	CACHE	SD+IWBI, 0(R1)		/* flush & invalidate thru L2 by index */
	CACHE	SD+IWBI, 1(R1)		/* ways are least significant bits */
	CACHE	SD+IWBI, 2(R1)
	CACHE	SD+IWBI, 3(R1)
	SUBU	$CACHELINESZ, R9
	ADDU	$CACHELINESZ, R1
	BGTZ	R9, ccache
	SYNC
	TOKSEG0(11)			/* return to kseg0 (cached) */
	MOVW	R10, M(STATUS)
	EHB
	RET
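/*
 * usage sketch (C, illustrative only): after storing instructions,
 * e.g. when loading a new text segment, make them visible to the
 * instruction fetch path:
 *
 *	memmove(dst, src, n);
 *	icflush(dst, n);	write back & invalidate through L2
 */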
/*
 * PMON routines, for early debugging.
 * wrapper converts PMON's calling convention to 0c's.
 * we must leave CFSZ bytes of stack space so that PMON
 * doesn't corrupt our stack.
 */
#define CFSZ	48		// XXX

	DATA	pmon_callvec(SB)/4, $0	/* pmon call vector */
	GLOBL	pmon_callvec(SB), $4

#define pmon_wrap(name, index) \
TEXT	name(SB), $((CFSZ+12+7)&(~7)); \
	MOVW	R1, R4; \
	MOVW	arg2+4(FP), R5; \
	MOVW	arg3+8(FP), R6; \
	MOVW	arg4+12(FP), R7; \
	MOVW	R29, R2; \
	AND	$~7, R29;		/* pmon needs R29 8-aligned */ \
	MOVW	R31, -4(SP); \
	MOVW	R30, -8(SP); \
	MOVW	R2, -12(SP); \
	MOVW	pmon_callvec(SB), R8; \
	MOVW	(4*(index))(R8), R8; \
	JAL	(R8); \
	MOVW	-8(SP), R30; \
	MOVW	-4(SP), R31; \
	MOVW	-12(SP), R29; \
	MOVW	R2, R1; \
	MOVW	R31, 0(R29); \
	RET

pmon_wrap(pmonprint, 5)
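/*
 * usage sketch (illustrative only; slot 5 of PMON's call vector is
 * assumed to be its printf-like routine, hence the name pmonprint):
 *
 *	pmonprint("early debug: status %x\n", getstatus());
 *
 * only safe early in boot, since it jumps back into firmware.
 */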