xref: /plan9-contrib/sys/src/9/bcm/rebootcode.s (revision 5c47fe09a0cc86dfb02c0ea4a2b6aec7eda2361f)
/*
 * armv6/armv7 reboot code
 */
#include "arm.s"

#define PTEDRAM		(Dom0|L1AP(Krw)|Section)

#define WFI	WORD	$0xe320f003	/* wait for interrupt */
#define WFE	WORD	$0xe320f002	/* wait for event */
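/* encoded as raw words; the assembler presumably has no mnemonics for these v6K hint instructions */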

/*
 * CPU0:
 *   main(PADDR(entry), PADDR(code), size);
 * Copy the new kernel to its correct location in virtual memory.
 * Then turn off the mmu and jump to the start of the kernel.
 *
 * Other CPUs:
 *   main(0, soc.armlocal, 0);
 * Turn off the mmu, wait for a restart address from CPU0, and jump to it.
 */
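
/*
 * For orientation, a hedged sketch of how the kernel is assumed to reach
 * this code on cpu0: copy rebootcode to a fixed, identity-mapped physical
 * address, then call it with the new kernel's entry point, image address
 * and size.  REBOOTADDR, rebootcode[] and cacheuwbinv() are illustrative
 * names, not taken from this file.
 *
 *	void (*f)(ulong, ulong, ulong);
 *
 *	f = (void*)REBOOTADDR;
 *	memmove(f, rebootcode, sizeof rebootcode);
 *	cacheuwbinv();				// make the copy visible to instruction fetch
 *	(*f)(PADDR(entry), PADDR(code), size);	// does not return
 */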

/* */
TEXT	main(SB), 1, $-4
	MOVW	$setR12(SB), R12

	/* copy in arguments before stack gets unmapped */
	MOVW	R0, R8			/* entry point */
	MOVW	p2+4(FP), R7		/* source */
	MOVW	n+8(FP), R6		/* byte count */

	/* redo double map of first MiB PHYSDRAM = KZERO */
	MOVW	12(R(MACH)), R2		/* m->mmul1 (virtual addr) */
	MOVW	$PTEDRAM, R1		/* PTE bits */
	MOVW	R1, (R2)
	DSB
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwb), CpCACHEse
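	/*
	 * with the first MiB of DRAM now mapped at virtual 0 as well as at
	 * KZERO, execution can continue below after the PC is switched to
	 * physical addresses
	 */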

	/* invalidate stale TLBs */
	BARRIERS
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	/* relocate PC to physical addressing */
	MOVW	$_reloc(SB), R15

TEXT _reloc(SB), $-4

	/* continue with reboot only on cpu0 */
	CPUID(R2)
	BEQ	bootcpu

	/* other cpus wait for inter processor interrupt from cpu0 */

	/* turn caches off, invalidate icache */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BIC	$(CpCdcache|CpCicache), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* turn off mmu */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BIC	$CpCmmu, R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	/* turn off SMP cache snooping */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpAuxctl
	BIC	$CpACsmp, R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpAuxctl
	ISB
	DSB
	/* turn icache back on */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	ORR	$(CpCicache), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BARRIERS

dowfi:
	WFE			/* wait for event signal */
	MOVW	$0xCC(R7), R1	/* inter-core .startcpu mailboxes */
	ADD	R2<<4, R1	/* mailbox for this core */
	MOVW	0(R1), R8	/* content of mailbox */
	CMP	$0, R8
	BEQ	dowfi		/* if zero, wait again */
	BL	(R8)		/* call received address */
	B	dowfi		/* shouldn't return */
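
/*
 * A sketch, under assumption, of how cpu0 in the newly started kernel is
 * expected to release a waiting core: store a nonzero start address in that
 * core's mailbox and wake it with SEV.  The mailbox write-set offset (0x8C)
 * and the startcpu()/sev() helpers are assumptions, not taken from this
 * file; the read side (armlocal + 0xCC + core*16) is what the loop above polls.
 *
 *	void
 *	startcpu(int core, void (*entry)(void))
 *	{
 *		ulong *mbox;
 *
 *		mbox = (ulong*)(soc.armlocal + 0x8C + core*16);
 *		*mbox = (ulong)entry;		// per-core mailbox write-set
 *		coherence();
 *		sev();				// wake cores spinning in WFE
 *	}
 */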

bootcpu:
	MOVW	$PADDR(MACHADDR+MACHSIZE-4), SP

	/* copy the kernel to final destination */
	MOVW	R8, R9		/* save physical entry point */
	ADD	$KZERO, R8	/* relocate dest to virtual */
	ADD	$KZERO, R7	/* relocate src to virtual */
	ADD	$3, R6		/* round length to words */
	BIC	$3, R6
memloop:
	MOVM.IA.W	(R7), [R1]
	MOVM.IA.W	[R1], (R8)
	SUB.S	$4, R6
	BNE	memloop
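	/*
	 * in rough C, assuming a nonzero length and illustrative names
	 * dst, src and n for R8, R7 and R6:
	 *
	 *	n = (n + 3) & ~3;
	 *	do{
	 *		*dst++ = *src++;
	 *		n -= 4;
	 *	}while(n != 0);
	 */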

	/* clean dcache using appropriate code for armv6 or armv7 */
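	/*
	 * armv6 (bcm2835) can clean its entire dcache with a single CP15
	 * operation; armv7 (bcm2836/7) has no whole-cache operation, so each
	 * level must be cleaned by set/way, which cachedwb and l2cacheuwb
	 * from cache.v7.s do.  ID_MMFR3 bits 3:0 (hierarchical maintenance
	 * by set/way) distinguish the two cases.
	 */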
	MRC	CpSC, 0, R1, C(CpID), C(CpIDfeat), 7	/* Memory Model Feature Register 3 */
	TST	$0xF, R1	/* hierarchical cache maintenance? */
	BNE	l2wb
	DSB
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwb), CpCACHEall
	B	l2wbx
l2wb:
	BL		cachedwb(SB)
	BL		l2cacheuwb(SB)
l2wbx:

	/* turn caches off, invalidate icache */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BIC	$(CpCdcache|CpCicache|CpCpredict), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	DSB
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	DSB
	/* turn off mmu */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BIC	$CpCmmu, R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BARRIERS
	/* turn off SMP cache snooping */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpAuxctl
	BIC	$CpACsmp, R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpAuxctl

	/* invalidate dcache using appropriate code for armv6 or armv7 */
	MRC	CpSC, 0, R1, C(CpID), C(CpIDfeat), 7	/* Memory Model Feature Register 3 */
	TST	$0xF, R1	/* hierarchical cache maintenance */
	BNE	l2inv
	DSB
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvd), CpCACHEall
	B	l2invx
l2inv:
	BL		cachedinv(SB)
	BL		l2cacheuinv(SB)
l2invx:

	/* jump to restart entry point */
	MOVW	R9, R8
	MOVW	$0, R9
	B	(R8)

#define ICACHELINESZ	32
#include "cache.v7.s"
153