/*
 * arm v7 reboot code
 *
 * must fit in 11K to avoid stepping on PTEs; see mem.h.
 * cache parameters are at CACHECONF.
 */
#include "arm.s"

/*
 * All caches but L1 should be off before calling this.
 * Turn off MMU, then copy the new kernel to its correct location
 * in physical memory.  Then jump to the start of the kernel.
 */

/* main(PADDR(entry), PADDR(code), size); */
TEXT main(SB), 1, $-4
	MOVW	$setR12(SB), R12
	MOVW	R0, p1+0(FP)		/* destination, passed in R0 */
	CPSID				/* splhi */

PUTC('R')
	BL	cachesoff(SB)
	/* now back in 29- or 26-bit addressing, mainly for SB */
	/* double mapping of PHYSDRAM & KZERO now in effect */

PUTC('e')
	/* before turning MMU off, switch to PHYSDRAM-based addresses */
	DMB

	MOVW	$KSEGM, R7		/* clear segment bits */
	MOVW	$PHYSDRAM, R0		/* set dram base bits */
	BIC	R7, R12			/* adjust SB */
	ORR	R0, R12

	BL	_r15warp(SB)
	/* don't care about saving R14; we're not returning */

	/*
	 * now running in PHYSDRAM segment, not KZERO.
	 */

PUTC('b')
	/* invalidate mmu mappings */
	MOVW	$KZERO, R0		/* some valid virtual address */
	MTCP	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

PUTC('o')
	/*
	 * turn the MMU off
	 */
	MFCP	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$CpCmmu, R0
	MTCP	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

PUTC('o')
	/* copy in arguments from stack frame before moving stack */
	MOVW	p2+4(FP), R4		/* phys source */
	MOVW	n+8(FP), R5		/* byte count */
	MOVW	p1+0(FP), R6		/* phys destination */

	/* set up a new stack for local vars and memmove args */
	MOVW	R6, SP			/* tiny trampoline stack */
	SUB	$(0x20 + 4), SP		/* back up before a.out header */

//	MOVW	R14, -48(SP)		/* store return addr */
	SUB	$48, SP			/* allocate stack frame */

	MOVW	R5, 40(SP)		/* save count */
	MOVW	R6, 44(SP)		/* save dest/entry */

	/* copy the new kernel into place */
	DELAY(printloop2, 2)
PUTC('t')
	MOVW	40(SP), R5		/* restore count */
	MOVW	44(SP), R6		/* restore dest/entry */
	MOVW	R6, 0(SP)		/* normally saved LR goes here */
	MOVW	R6, 4(SP)		/* push dest */
	MOVW	R6, R0
	MOVW	R4, 8(SP)		/* push src */
	MOVW	R5, 12(SP)		/* push size */
	BL	memmove(SB)

PUTC('-')
PUTC('>')
	DELAY(printloopret, 1)
PUTC('\r')
	DELAY(printloopnl, 1)
PUTC('\n')
/*
 * jump to kernel entry point.  Note the true kernel entry point is
 * the virtual address KZERO|R6, but this must wait until
 * the MMU is enabled by the kernel in l.s
 */
	MOVW	44(SP), R6		/* restore R6 (dest/entry) */
	ORR	R6, R6			/* NOP: avoid link bug */
	B	(R6)
PUTC('?')
PUTC('?')
	B	0(PC)
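
/*
 * For reference, the sequence above corresponds roughly to the
 * following C (an illustrative sketch only, never assembled;
 * mmuoff() is a hypothetical stand-in for the CpCONTROL
 * manipulation above, and the argument names follow the
 * prototype comment at main):
 *
 *	void
 *	main(ulong entry, ulong code, ulong size)
 *	{
 *		splhi();
 *		cachesoff();	// also double-maps PHYSDRAM & KZERO
 *		mmuoff();	// clear CpCmmu in CpCONTROL
 *		memmove((void*)entry, (void*)code, size);
 *		(*(void(*)(void))entry)();	// new kernel; no return
 *	}
 */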

/*
 * turn the caches off, double map PHYSDRAM & KZERO, invalidate TLBs, revert
 * to tiny addresses.  upon return, it will be safe to turn off the mmu.
 */
TEXT cachesoff(SB), 1, $-4
	MOVM.DB.W [R14,R1-R10], (R13)	/* save regs on stack */
	CPSID
	BARRIERS

	SUB	$12, SP			/* paranoia */
	BL	cacheuwbinv(SB)
	ADD	$12, SP			/* paranoia */

	MFCP	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCicache|CpCdcache), R0
	MTCP	CpSC, 0, R0, C(CpCONTROL), C(0)	/* caches off */
	BARRIERS

	/*
	 * caches are off
	 */

	/* invalidate stale TLBs before changing them */
	MOVW	$KZERO, R0		/* some valid virtual address */
	MTCP	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	/* redo double map of PHYSDRAM, KZERO */
	MOVW	$PHYSDRAM, R3
	CMP	$KZERO, R3
	BEQ	noun2map
	MOVW	$(L1+L1X(PHYSDRAM)), R4	/* address of PHYSDRAM's PTE */
	MOVW	$PTEDRAM, R2		/* PTE bits */
	MOVW	$DOUBLEMAPMBS, R5
_ptrdbl:
	ORR	R3, R2, R1		/* first identity-map 0 to 0, etc. */
	MOVW	R1, (R4)
	ADD	$4, R4			/* bump PTE address */
	ADD	$MiB, R3		/* bump pa */
	SUB.S	$1, R5
	BNE	_ptrdbl
noun2map:

	/*
	 * flush stale TLB entries
	 */

	BARRIERS
	MOVW	$KZERO, R0		/* some valid virtual address */
	MTCP	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	/* switch back to PHYSDRAM addressing, mainly for SB */
	MOVW	$KSEGM, R7		/* clear segment bits */
	MOVW	$PHYSDRAM, R0		/* set dram base bits */
	BIC	R7, R12			/* adjust SB */
	ORR	R0, R12
	BIC	R7, SP
	ORR	R0, SP

	MOVM.IA.W (R13), [R14,R1-R10]	/* restore regs from stack */

	MOVW	$KSEGM, R0		/* clear segment bits */
	BIC	R0, R14			/* adjust link */
	MOVW	$PHYSDRAM, R0		/* set dram base bits */
	ORR	R0, R14

	RET

TEXT _r15warp(SB), 1, $-4
	BIC	R7, R14			/* link */
	ORR	R0, R14

	BIC	R7, R13			/* SP */
	ORR	R0, R13
	RET

TEXT panic(SB), 1, $-4			/* stub */
PUTC('?')
PUTC('!')
	RET
TEXT pczeroseg(SB), 1, $-4		/* stub */
	RET

#include "cache.v7.s"

/* modifies R0, R3-R6 */
TEXT printhex(SB), 1, $-4
	MOVW	R0, R3
	MOVW	$(32-4), R5		/* bits to shift right */
nextdig:
	SRA	R5, R3, R4
	AND	$0xf, R4
	ADD	$'0', R4
	CMP.S	$'9', R4
	BLE	nothex			/* if R4 <= 9, jump */
	ADD	$('a'-('9'+1)), R4
nothex:
	PUTC(R4)
	SUB.S	$4, R5
	BGE	nextdig

	PUTC('\r')
	PUTC('\n')
	DELAY(proct, 50)
	RET
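
/*
 * Sketch of the expected caller, assuming the usual Plan 9 reboot
 * convention for these ports (REBOOTADDR, rebootcode[] and rebootsize
 * are taken to come from the surrounding port; they are assumptions
 * here, not defined in this file):
 *
 *	void (*f)(ulong, ulong, ulong);
 *
 *	f = (void*)REBOOTADDR;
 *	memmove(f, rebootcode, rebootsize);
 *	// all caches but L1 should already be off, per the note at main
 *	(*f)(PADDR(entry), PADDR(code), size);	// does not return
 */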