/*
 * Broadcom bcm2835 SoC, as used in Raspberry Pi
 * arm1176jzf-s processor (armv6)
 */

#include "arm.s"

#define CACHELINESZ 32

TEXT armstart(SB), 1, $-4

	/*
	 * SVC mode, interrupts disabled
	 */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1
	MOVW	R1, CPSR

	/*
	 * disable the mmu and L1 caches
	 * invalidate caches and tlb
	 */
	MRC	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	BIC	$(CpCdcache|CpCicache|CpCpredict|CpCmmu), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0), CpMainctl
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvu), CpCACHEall
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	ISB

	/*
	 * clear mach and page tables
	 */
	MOVW	$PADDR(MACHADDR), R1
	MOVW	$PADDR(KTZERO), R2
_ramZ:
	MOVW	R0, (R1)
	ADD	$4, R1
	CMP	R1, R2
	BNE	_ramZ

	/*
	 * start stack at top of mach (physical addr)
	 * set up page tables for kernel
	 */
	MOVW	$PADDR(MACHADDR+MACHSIZE-4), R13
	MOVW	$PADDR(L1), R0
	BL	,mmuinit(SB)

	/*
	 * set up domain access control and page table base
	 */
	MOVW	$Client, R1
	MCR	CpSC, 0, R1, C(CpDAC), C(0)
	MOVW	$PADDR(L1), R1
	MCR	CpSC, 0, R1, C(CpTTB), C(0)

	/*
	 * enable caches, mmu, and high vectors
	 */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0), CpMainctl
	ORR	$(CpChv|CpCdcache|CpCicache|CpCpredict|CpCmmu), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0), CpMainctl
	ISB

	/*
	 * switch SB, SP, and PC into KZERO space
	 */
	MOVW	$setR12(SB), R12
	MOVW	$(MACHADDR+MACHSIZE-4), R13
	MOVW	$_startpg(SB), R15
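	/*
	 * the three instructions above still execute from physical
	 * addresses with the mmu now on, so the L1 table set up by
	 * mmuinit must map the kernel at its physical address as well
	 * as at KZERO.
	 */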

TEXT _startpg(SB), 1, $-4

	/*
	 * enable cycle counter
	 */
	MOVW	$1, R1
	MCR	CpSC, 0, R1, C(CpSPM), C(CpSPMperf), CpSPMctl

	/*
	 * call main and loop forever if it returns
	 */
	BL	,main(SB)
	B	,0(PC)

	BL	_div(SB)		/* hack to load _div, etc. */

TEXT cpidget(SB), 1, $-4			/* main ID */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid
	RET

TEXT fsrget(SB), 1, $-4				/* data fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0), CpFSRdata
	RET

TEXT ifsrget(SB), 1, $-4			/* instruction fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0), CpFSRinst
	RET

TEXT farget(SB), 1, $-4				/* fault address */
	MRC	CpSC, 0, R0, C(CpFAR), C(0x0)
	RET

TEXT lcycles(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpSPM), C(CpSPMperf), CpSPMcyc
	RET

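/*
 * interrupt enable/disable.  splhi and splfhi return the old CPSR for a
 * later splx; they also record the caller's pc in the second word of
 * Mach (its splpc field) before raising the level, for diagnosis.
 * rough C usage, as in the portable kernel code (sketch only):
 *
 *	s = splhi();
 *	...touch state shared with interrupt handlers...
 *	splx(s);
 */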
TEXT splhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2		/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	CPSR, R0			/* turn off irqs (but not fiqs) */
	ORR	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT splfhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2		/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	CPSR, R0			/* turn off irqs and fiqs */
	ORR	$(PsrDirq|PsrDfiq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT splflo(SB), 1, $-4
	MOVW	CPSR, R0			/* turn on fiqs */
	BIC	$(PsrDfiq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT spllo(SB), 1, $-4
	MOVW	CPSR, R0			/* turn on irqs and fiqs */
	BIC	$(PsrDirq|PsrDfiq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT splx(SB), 1, $-4
	MOVW	$(MACHADDR+0x04), R2		/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	R0, R1				/* reset interrupt level */
	MOVW	CPSR, R0
	MOVW	R1, CPSR
	RET

TEXT spldone(SB), 1, $0				/* end marker for devkprof.c */
	RET

TEXT islo(SB), 1, $-4
	MOVW	CPSR, R0
	AND	$(PsrDirq), R0
	EOR	$(PsrDirq), R0
	RET

TEXT	tas(SB), $-4
TEXT	_tas(SB), $-4
	MOVW	R0,R1
	MOVW	$1,R0
	SWPW	R0,(R1)			/* fix: deprecated in armv6 */
	RET
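
/*
 * SWP is deprecated from armv6 on; it still works on this uniprocessor
 * SoC, but a replacement would spin with exclusive loads and stores.
 * sketch only, not assembled here (plain LDREX/STREX mnemonics shown;
 * this port would need WORD encodings or macros for them, and an MP
 * system would also want barriers):
 *
 *	MOVW	R0, R1
 *	MOVW	$1, R2
 * tas1:
 *	LDREX	(R1), R0	load old value, marking exclusive access
 *	STREX	R2, (R1), R3	try to store 1; R3 != 0 on failure
 *	CMP	$0, R3
 *	BNE	tas1		lost exclusivity, retry
 */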

TEXT setlabel(SB), 1, $-4
	MOVW	R13, 0(R0)		/* sp */
	MOVW	R14, 4(R0)		/* pc */
	MOVW	$0, R0
	RET

TEXT gotolabel(SB), 1, $-4
	MOVW	0(R0), R13		/* sp */
	MOVW	4(R0), R14		/* pc */
	MOVW	$1, R0
	RET
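
/*
 * setlabel/gotolabel switch between saved {sp, pc} contexts (Label).
 * setlabel returns 0 when it saves; a later gotolabel on that Label
 * resumes at the saved call site with R0 = 1.  rough C usage, as in
 * the portable scheduler (sketch only):
 *
 *	if(setlabel(&up->sched) == 0)
 *		gotolabel(&m->sched);	switch away; we return here
 *					with 1 when rescheduled
 */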

TEXT getcallerpc(SB), 1, $-4
	MOVW	0(R13), R0
	RET

TEXT idlehands(SB), $-4
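	/*
	 * if no process is ready to run, wait for an interrupt
	 * (wait-for-interrupt is a CP15 operation on armv6)
	 */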
	MOVW	CPSR, R3
	ORR	$(PsrDirq|PsrDfiq), R3, R1		/* splfhi */
	MOVW	R1, CPSR

	DSB
	MOVW	nrdy(SB), R0
	CMP	$0, R0
	MCR.EQ	CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
	DSB

	MOVW	R3, CPSR			/* splx */
	RET


TEXT coherence(SB), $-4
	BARRIERS
	RET

/*
 * invalidate tlb
 */
TEXT mmuinvalidate(SB), 1, $-4
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS
	MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEflushbtc
	RET

/*
 * mmuinvalidateaddr(va)
 *   invalidate tlb entry for virtual page address va, ASID 0
 */
TEXT mmuinvalidateaddr(SB), 1, $-4
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse
	BARRIERS
	RET

/*
 * drain write buffer
 * writeback data cache
 */
TEXT cachedwb(SB), 1, $-4
	DSB
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwb), CpCACHEall
	RET

/*
 * drain write buffer
 * writeback and invalidate data cache
 */
TEXT cachedwbinv(SB), 1, $-4
	DSB
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwbi), CpCACHEall
	RET

/*
 * cachedwbinvse(va, n)
 *   drain write buffer
 *   writeback and invalidate data cache range [va, va+n)
 */
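/*
 * the MCRR block operations take a start address in R1 and an end
 * address in R2 and act on each cache line in between, end inclusive;
 * the SUB $1 makes R2 the last byte of the range so rounding down to
 * a line does not touch the line beyond it.
 */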
TEXT cachedwbinvse(SB), 1, $-4
	MOVW	R0, R1		/* DSB clears R0 */
	DSB
	MOVW	n+4(FP), R2
	ADD	R1, R2
	SUB	$1, R2
	BIC	$(CACHELINESZ-1), R1
	BIC	$(CACHELINESZ-1), R2
	MCRR(CpSC, 0, 2, 1, CpCACHERANGEdwbi)
	RET

/*
 * cachedwbse(va, n)
 *   drain write buffer
 *   writeback data cache range [va, va+n)
 */
TEXT cachedwbtlb(SB), 1, $-4
TEXT cachedwbse(SB), 1, $-4

	MOVW	R0, R1		/* DSB clears R0 */
	DSB
	MOVW	n+4(FP), R2
	ADD	R1, R2
	BIC	$(CACHELINESZ-1), R1
	BIC	$(CACHELINESZ-1), R2
	MCRR(CpSC, 0, 2, 1, CpCACHERANGEdwb)
	RET

/*
 * cachedinvse(va, n)
 *   drain write buffer
 *   invalidate data cache range [va, va+n)
 */
TEXT cachedinvse(SB), 1, $-4
	MOVW	R0, R1		/* DSB clears R0 */
	DSB
	MOVW	n+4(FP), R2
	ADD	R1, R2
	SUB	$1, R2
	BIC	$(CACHELINESZ-1), R1
	BIC	$(CACHELINESZ-1), R2
	MCRR(CpSC, 0, 2, 1, CpCACHERANGEinvd)
	RET

/*
 * drain write buffer and prefetch buffer
 * writeback and invalidate data cache
 * invalidate instruction cache
 */
TEXT cacheuwbinv(SB), 1, $-4
	BARRIERS
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEwbi), CpCACHEall
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	RET

/*
 * L2 cache is not enabled
 */
TEXT l2cacheuwbinv(SB), 1, $-4
	RET

/*
 * invalidate instruction cache
 */
TEXT cacheiinv(SB), 1, $-4
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	RET

/*
 * invalidate range of instruction cache
 */
TEXT cacheiinvse(SB), 1, $-4
	MOVW	R0, R1		/* DSB clears R0 */
	DSB
	MOVW n+4(FP), R2
	ADD	R1, R2
	SUB	$1, R2
	MCRR(CpSC, 0, 2, 1, CpCACHERANGEinvi)
	MCR CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEflushbtc
	DSB
	ISB
	RET