/*
 * sheevaplug machine assist
 * arm926ej-s processor at 1.2GHz
 *
 * loader uses R11 as scratch.
 * R9 and R10 are used for `extern register' variables.
 *
 * ARM v7 arch. ref. man. (I know, this is v5) §B1.3.3 says that
 * we don't need barriers around moves to CPSR.  The ARM v6 manual
 * seems to be silent on the subject.
 */
#include "arm.s"

/*
 * MCR and MRC are counter-intuitively named.
 *	MCR	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# arm -> coproc
 *	MRC	coproc, opcode1, Rd, CRn, CRm[, opcode2]	# coproc -> arm
 */
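/*
 * for example, cpidget below reads the main ID register into R0 with
 *	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid	# coproc -> arm
 */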

/*
 * Entered here from Das U-Boot with MMU disabled.
 * Until the MMU is enabled it is OK to call functions provided
 * they are within ±32MiB relative (the reach of BL's signed 24-bit
 * word offset) and do not require any local variables or more than
 * one argument (i.e. there is no stack).
 */
TEXT _start(SB), 1, $-4
	MOVW	$setR12(SB), R12		/* load the SB */
_main:
	/* SVC mode, interrupts disabled */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R1
	MOVW	R1, CPSR
	BARRIERS

	/*
	 * disable the MMU & caches,
	 * switch to system permission & 32-bit addresses.
	 */
	MOVW	$(CpCsystem|CpCd32|CpCi32), R1
	MCR	CpSC, 0, R1, C(CpCONTROL), C(0)
	ISB

	/*
	 * disable the Sheevaplug's L2 cache, invalidate all caches
	 */

	/* flush caches.  926ejs manual says we have to do it iteratively. */
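	/*
	 * test and clean: MRC with PC as the destination copies the
	 * result to the condition flags; NE means dirty lines remain,
	 * so keep looping.
	 */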
_dwbinv0:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv0
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	/* make the l2 cache pay attention and disable resets */
	MOVW	$(PHYSIO+0x20100), R1	/* CPUCSREG */
	MOVW	$0, R0
	MOVW	R0, 8(R1)		/* cpu->rstout = 0; */
	MOVW	(4*10)(R1), R2
	ORR	$(1<<3), R2		/* cpu->l2cfg |= L2exists */
	MOVW	R2, (4*10)(R1)
	ISB

	/* invalidate l2 cache */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	ISB

	/* disable l2 cache.  do this while l1 caches are off */
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	/* disabling write allocation is probably for cortex-a8 errata 460075 */
	/* l2 off, no wr alloc, no streaming */
	BIC	$(CpTCl2ena | CpTCl2wralloc | CpTCldcstream), R1
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS

	/* flush caches.  926ejs manual says we have to do it iteratively. */
_dwbinv1:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv1
	BARRIERS

PUTC('\r')
	/* turn off watchdog; see clock.c */
	MOVW	$(PHYSIO+0x20300), R1	/* tmr = (TimerReg *)soc.clock; */
	MOVW	$0, R0
	MOVW	R0, 0(R1)		/* tmr->ctl = 0; */
	BARRIERS

	/* clear Mach */
	MOVW	$0, R0
	MOVW	$PADDR(MACHADDR), R4		/* address of Mach */
_machZ:
	MOVW	R0, (R4)
	ADD	$4, R4				/* bump address */
	CMP.S	$PADDR(L1+L1X(0)), R4		/* clear up to the l1 table */
	BNE	_machZ

	/*
	 * set up the MMU page table
	 */

	/* clear all PTEs first, to provide a default */
PUTC('\n')
	MOVW	$PADDR(L1+L1X(0)), R4		/* address of PTE for 0 */
_ptenv0:
	ZEROPTE()
	CMP.S	$PADDR(L1+16*KiB), R4
	BNE	_ptenv0

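	/*
	 * the l1 table is 16KiB: 4096 word-sized section descriptors,
	 * each mapping 1MiB of virtual space; L1X(va) is the byte offset
	 * of va's descriptor within the table.
	 */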
	/* double map of PHYSDRAM, KZERO to PHYSDRAM for first few MBs */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PHYSDRAM, R3			/* pa */
	MOVW	$PADDR(L1+L1X(PHYSDRAM)), R4	/* address of PTE for PHYSDRAM */
	MOVW	$16, R5
_ptdbl:
	FILLPTE()
	SUB.S	$1, R5
	BNE	_ptdbl
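	/*
	 * the identity map keeps the PC (still physical) valid when the
	 * MMU comes on; it is undone once we are running at KZERO.
	 */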

	/*
	 * back up and fill in PTEs for memory at KZERO
	 * there is 1 bank of 512MiB of SDRAM at PHYSDRAM
	 */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$PHYSDRAM, R3
	MOVW	$PADDR(L1+L1X(KZERO)), R4	/* start with PTE for KZERO */
	MOVW	$512, R5			/* loop count */
_ptekrw:					/* set PTEs for 512MiB */
	FILLPTE()
	SUB.S	$1, R5
	BNE	_ptekrw

	/*
	 * back up and fill in PTE for MMIO
	 */
	MOVW	$PTEIO, R2			/* PTE bits */
	MOVW	$PHYSIO, R3
	MOVW	$PADDR(L1+L1X(VIRTIO)), R4	/* start with PTE for VIRTIO */
	FILLPTE()

	/* mmu.c sets up the vectors later */

	/*
	 * set up a temporary stack; avoid data & bss segments
	 */
	MOVW	$(PHYSDRAM | (400*1024*1024)), R13

PUTC('P')
	/* set the domain access control */
	MOVW	$Client, R0
	BL	dacput(SB)

	/* set the translation table base */
	MOVW	$PADDR(L1), R0
	BL	ttbput(SB)

	MOVW	$0, R0
	BL	pidput(SB)		/* paranoia */

	/* the little dance to turn the MMU & caches on */
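	/*
	 * write back and invalidate l1, invalidate the tlbs, then enable:
	 * the next fetches still use physical addresses and hit the
	 * double map until _r15warp switches us to KZERO.
	 */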
PUTC('l')
	BL	cacheuwbinv(SB)
	BL	mmuinvalidate(SB)
	BL	mmuenable(SB)

PUTC('a')
	/* warp the PC into the virtual map */
	MOVW	$KZERO, R0
	BL	_r15warp(SB)

	/*
	 * now running at KZERO+something!
	 */

	MOVW	$setR12(SB), R12		/* reload the SB */

	/*
	 * set up temporary stack again, in case we've just switched
	 * to a new register set.
	 */
	MOVW	$(KZERO|(400*1024*1024)), R13

	/* can now execute arbitrary C code */

	BL	cacheuwbinv(SB)

PUTC('n')
	/* undo double map of 0, KZERO */
	MOVW	$PADDR(L1+L1X(0)), R4		/* address of PTE for 0 */
	MOVW	$0, R0
	MOVW	$16, R5
_ptudbl:
	MOVW	R0, (R4)
	ADD	$4, R4				/* bump PTE address */
	ADD	$MiB, R0			/* bump pa */
	SUB.S	$1, R5
	BNE	_ptudbl
	BARRIERS
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinvse
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

PUTC(' ')
	/* pass Mach to main and set up the stack */
	MOVW	$(MACHADDR), R0			/* Mach */
	MOVW	R0, R13
	ADD	$(MACHSIZE), R13		/* stack pointer */
	SUB	$4, R13				/* space for link register */
	BL	main(SB)			/* void main(Mach*) */
	/* fall through */


/* not used */
TEXT _reset(SB), 1, $-4
	/* turn the caches off */
	MOVW	$(PsrDirq|PsrDfiq|PsrMsvc), R0
	MOVW	R0, CPSR
	BARRIERS
	BL	cacheuwbinv(SB)
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCwb|CpCicache|CpCdcache|CpCalign), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
PUTC('R')

	/* redo double map of 0, KZERO */
	MOVW	$(L1+L1X(0)), R4		/* address of PTE for 0 */
	MOVW	$PTEDRAM, R2			/* PTE bits */
	MOVW	$0, R3
	MOVW	$16, R5
_ptrdbl:
	ORR	R3, R2, R1			/* first identity-map 0 to 0, etc. */
	MOVW	R1, (R4)
	ADD	$4, R4				/* bump PTE address */
	ADD	$MiB, R3			/* bump pa */
	SUB.S	$1, R5
	BNE	_ptrdbl

	BARRIERS
PUTC('e')
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvd), CpTLBinv
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS

	/* back to 29- or 26-bit addressing, mainly for SB */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCd32|CpCi32), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	/* turn the MMU off */
	MOVW	$PHYSDRAM, R0
	BL	_r15warp(SB)
	BL	mmuinvalidate(SB)
	BL	mmudisable(SB)

PUTC('s')
	/* set new reset vector */
	MOVW	$0, R2
	MOVW	$0xe59ff018, R3			/* MOVW 0x18(R15), R15 */
	MOVW	R3, (R2)
PUTC('e')

	MOVW	$PHYSBOOTROM, R3
	MOVW	R3, 0x20(R2)		/* where $0xe59ff018 jumps to (0x18 + 8 for the pipeline) */
	BARRIERS
PUTC('t')
PUTC('\r')
PUTC('\n')

	/* ...and jump to it */
	MOVW	R2, R15				/* software reboot */
_limbo:						/* should not get here... */
	B	_limbo				/* ... and can't get out */
	BL	_div(SB)			/* hack to load _div, etc. */

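/*
 * move the return address (and sp) into the address space selected by
 * R0 by replacing their KSEGM bits, so that RET resumes execution in
 * the new mapping.
 */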
TEXT _r15warp(SB), 1, $-4
	BIC	$KSEGM, R14
	ORR	R0, R14
	BIC	$KSEGM, R13
	ORR	R0, R13
	RET

/* clobbers R1, R6 */
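/* the character is passed in R3, presumably loaded by the PUTC macro */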
TEXT myputc(SB), 1, $-4
	MOVW	$PHYSCONS, R6
_busy:
	MOVW	20(R6), R1
	BIC.S	$~(1<<5), R1			/* (x->lsr & LSRthre) == 0? */
	BEQ	_busy
	MOVW	R3, (R6)			/* print */
	ISB
	RET

/*
 * l1 caches
 */

TEXT l1cacheson(SB), 1, $-4
	MOVW	CPSR, R5
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR			/* splhi */

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	MOVW	R5, CPSR			/* splx */
	RET

TEXT l1cachesoff(SB), 1, $-4
	MOVM.DB.W [R14], (SP)			/* save lr on stack */

	MOVW	CPSR, R5
	ORR	$(PsrDirq|PsrDfiq), R5, R4
	MOVW	R4, CPSR			/* splhi */

	BL	cacheuwbinv(SB)

	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS

	MOVW	R5, CPSR			/* splx */
	MOVM.IA.W (SP), [R14]			/* restore lr */
	RET

/*
 * cache* functions affect only the L1 caches, which are VIVT.
 */
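/*
 * (virtual indexing means cached lines must be written back or
 * invalidated whenever virtual-to-physical mappings change.)
 */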

TEXT cachedwb(SB), 1, $-4			/* D writeback */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
_dwb:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwb), CpCACHEtest
	BNE	_dwb
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR			/* splx */
	RET

TEXT cachedwbse(SB), 1, $-4			/* D writeback SE */
	MOVW	R0, R2				/* first arg: address */

	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	MOVW	4(FP), R1			/* second arg: size */

//	CMP.S	$(4*1024), R1
//	BGT	_dwb
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dwbse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwb), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dwbse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR			/* splx */
	RET

TEXT cachedwbinv(SB), 1, $-4			/* D writeback+invalidate */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
_dwbinv:
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_dwbinv
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR			/* splx */
	RET

TEXT cachedwbinvse(SB), 1, $-4			/* D writeback+invalidate SE */
	MOVW	R0, R2				/* first arg: address */

	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	MOVW	4(FP), R1			/* second arg: size */

	DSB
//	CMP.S	$(4*1024), R1
//	BGT	_dwbinv
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dwbinvse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEwbi), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dwbinvse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR			/* splx */
	RET

TEXT cachedinvse(SB), 1, $-4			/* D invalidate SE */
	MOVW	R0, R2				/* first arg: address */

	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1			/* second arg: size */

	DSB
//	CMP.S	$(4*1024), R1
//	BGT	_dinv
	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_dinvse:
	MCR	CpSC, 0, R2, C(CpCACHE), C(CpCACHEinvd), CpCACHEse
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_dinvse
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR			/* splx */
	RET

TEXT cacheuwbinv(SB), 1, $-4			/* D+I writeback+invalidate */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	BARRIERS			/* force outstanding stores to cache */
	/* keep writing back dirty cache lines until no more exist */
_uwbinv:					/* D writeback+invalidate */
	MRC	CpSC, 0, PC, C(CpCACHE), C(CpCACHEwbi), CpCACHEtest
	BNE	_uwbinv
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	$0, R0				/* I invalidate */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS

	MOVW	R3, CPSR			/* splx */
	RET

TEXT cacheiinv(SB), 1, $-4			/* I invalidate */
	BARRIERS
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS
	RET

TEXT cachedinv(SB), 1, $-4			/* D invalidate */
_dinv:
	BARRIERS
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEinvd), CpCACHEall
	/* drain L1 write buffer, also drains L2 eviction buffer on sheeva */
	BARRIERS
	RET

/*
 * l2 cache
 *
 * these functions assume that the necessary l1 cache operations have been
 * or will be done explicitly by the caller.
 */

/* enable l2 cache in config coproc. reg.  do this while l1 caches are off. */
TEXT l2cachecfgon(SB), 1, $-4
	BARRIERS
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS

	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	ORR	$(CpTCl2ena | CpTCl2prefdis), R1	/* l2 on, prefetch off */
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS
	RET

/* disable l2 cache in config coproc. reg.  do this while l1 caches are off. */
TEXT l2cachecfgoff(SB), 1, $-4
	BARRIERS
	MRC	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BIC	$CpTCl2ena, R1
	MCR	CpSC, CpL2, R1, C(CpTESTCFG), C(CpTCl2cfg), CpTCl2conf
	BARRIERS

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	BARRIERS
	RET

TEXT l2cacheuwb(SB), 1, $-4			/* L2 unified writeback */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	ISB
	RET

TEXT l2cacheuwbse(SB), 1, $-4			/* L2 unified writeback SE */
	MOVW	R0, R2				/* first arg: address */

	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1			/* second arg: size */

	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2wbse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2wbse
	ISB

	MOVW	R3, CPSR			/* splx */
	RET

TEXT l2cacheuwbinv(SB), 1, $-4		/* L2 unified writeback+invalidate */
	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2flush), CpTCl2all
	ISB
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	ISB

	MOVW	R3, CPSR			/* splx */
	RET

TEXT l2cacheuwbinvse(SB), 1, $-4	/* L2 unified writeback+invalidate SE */
	MOVW	R0, R2				/* first arg: address */

	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1			/* second arg: size */

	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2wbinvse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2flush), CpTCl2seva
	ISB
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2wbinvse
	ISB

	MOVW	R3, CPSR			/* splx */
	RET

TEXT l2cacheuinv(SB), 1, $-4			/* L2 unified invalidate */
	MCR	CpSC, CpL2, R0, C(CpTESTCFG), C(CpTCl2inv), CpTCl2all
	ISB
	RET

TEXT l2cacheuinvse(SB), 1, $-4			/* L2 unified invalidate SE */
	MOVW	R0, R2				/* first arg: address */

	MOVW	CPSR, R3			/* splhi */
	ORR	$(PsrDirq), R3, R1
	MOVW	R1, CPSR

	MOVW	4(FP), R1			/* second arg: size */

	ADD	R2, R1
	BIC	$(CACHELINESZ-1), R2
_l2invse:
	MCR	CpSC, CpL2, R2, C(CpTESTCFG), C(CpTCl2inv), CpTCl2seva
	ADD	$CACHELINESZ, R2
	CMP.S	R2, R1
	BGT	_l2invse
	ISB

	MOVW	R3, CPSR			/* splx */
	RET

/*
 *  enable mmu, i and d caches, and high vector
 */
TEXT mmuenable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	ORR	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
	BIC	$(CpCrom), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	RET

TEXT mmudisable(SB), 1, $-4
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	BIC	$(CpChv|CpCmmu|CpCdcache|CpCicache|CpCwb), R0
	MCR	CpSC, 0, R0, C(CpCONTROL), C(0)
	BARRIERS
	RET

TEXT mmuinvalidate(SB), 1, $-4			/* invalidate all */
	MOVW	$0, R0
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinv
	BARRIERS
	RET

TEXT mmuinvalidateaddr(SB), 1, $-4		/* invalidate single entry */
	MCR	CpSC, 0, R0, C(CpTLB), C(CpTLBinvu), CpTLBinvse
	BARRIERS
	RET

TEXT cpidget(SB), 1, $-4			/* main ID */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDid
	RET

TEXT cpctget(SB), 1, $-4			/* cache type */
	MRC	CpSC, 0, R0, C(CpID), C(0), CpIDct
	RET

TEXT controlget(SB), 1, $-4			/* control */
	MRC	CpSC, 0, R0, C(CpCONTROL), C(0)
	RET

TEXT ttbget(SB), 1, $-4				/* translation table base */
	MRC	CpSC, 0, R0, C(CpTTB), C(0)
	RET

TEXT ttbput(SB), 1, $-4				/* translation table base */
	MCR	CpSC, 0, R0, C(CpTTB), C(0)
	ISB
	RET

TEXT dacget(SB), 1, $-4				/* domain access control */
	MRC	CpSC, 0, R0, C(CpDAC), C(0)
	RET

TEXT dacput(SB), 1, $-4				/* domain access control */
	MCR	CpSC, 0, R0, C(CpDAC), C(0)
	ISB
	RET

TEXT fsrget(SB), 1, $-4				/* fault status */
	MRC	CpSC, 0, R0, C(CpFSR), C(0)
	RET

TEXT farget(SB), 1, $-4				/* fault address */
	MRC	CpSC, 0, R0, C(CpFAR), C(0x0)
	RET

TEXT pidget(SB), 1, $-4				/* address translation pid */
	MRC	CpSC, 0, R0, C(CpPID), C(0x0)
	RET

TEXT pidput(SB), 1, $-4				/* address translation pid */
	MCR	CpSC, 0, R0, C(CpPID), C(0x0)
	ISB
	RET

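/*
 * splhi and splx record the caller's pc in Mach (presumably m->splpc)
 * so that spl mismatches can be traced.
 */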
TEXT splhi(SB), 1, $-4
	MOVW	$(MACHADDR+4), R2		/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	CPSR, R0			/* turn off interrupts */
	ORR	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT spllo(SB), 1, $-4
	MOVW	CPSR, R0
	BIC	$(PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

TEXT splx(SB), 1, $-4
	MOVW	$(MACHADDR+0x04), R2		/* save caller pc in Mach */
	MOVW	R14, 0(R2)

	MOVW	R0, R1				/* reset interrupt level */
	MOVW	CPSR, R0
	MOVW	R1, CPSR
	RET

TEXT splxpc(SB), 1, $-4				/* for iunlock */
	MOVW	R0, R1
	MOVW	CPSR, R0
	MOVW	R1, CPSR
	RET

TEXT spldone(SB), 1, $0
	RET

TEXT islo(SB), 1, $-4
	MOVW	CPSR, R0
	AND	$(PsrDirq), R0
	EOR	$(PsrDirq), R0
	RET

TEXT splfhi(SB), $-4
	MOVW	CPSR, R0
	ORR	$(PsrDfiq|PsrDirq), R0, R1
	MOVW	R1, CPSR
	RET

//TEXT splflo(SB), $-4
//	MOVW	CPSR, R0
//	BIC	$(PsrDfiq), R0, R1
//	MOVW	R1, CPSR
//	RET

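/*
 * tas returns the previous value of the lock word: non-zero means the
 * lock was already held.  SWP is deprecated in later ARM architectures,
 * but this ARMv5 core has no LDREX/STREX, so an atomic swap is the
 * only choice here.
 */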
TEXT	tas(SB), $-4
TEXT	_tas(SB), $-4
	MOVW	R0, R1
	MOVW	$1, R0
	SWPW	R0, (R1)		/* fix: deprecated in armv7 */
	RET

//TEXT tas32(SB), 1, $-4
//	MOVW	R0, R1
//	MOVW	$0xDEADDEAD, R0
//	MOVW	R0, R3
//	SWPW	R0, (R1)
//	CMP.S	R0, R3
//	BEQ	_tasout
//	EOR	R3, R3			/* R3 = 0 */
//	CMP.S	R0, R3
//	BEQ	_tasout
//	MOVW	$1, R15			/* abort: lock != 0 && lock != $0xDEADDEAD */
//_tasout:
//	RET

TEXT clz(SB), 1, $-4
	CLZ(0, 0)			/* 0 is R0 */
	RET

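/*
 * setlabel and gotolabel are the kernel's setjmp and longjmp:
 * setlabel saves sp and the return pc and returns 0; gotolabel
 * reloads them and "returns" 1 at the saved pc.
 */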
TEXT setlabel(SB), 1, $-4
	MOVW	R13, 0(R0)		/* sp */
	MOVW	R14, 4(R0)		/* pc */
	BARRIERS
	MOVW	$0, R0
	RET

TEXT gotolabel(SB), 1, $-4
	MOVW	0(R0), R13		/* sp */
	MOVW	4(R0), R14		/* pc */
	BARRIERS
	MOVW	$1, R0
	RET

TEXT getcallerpc(SB), 1, $-4
	MOVW	0(R13), R0
	RET

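/*
 * idle with interrupts enabled so that one can end the cp15
 * wait-for-interrupt sleep below.
 */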
TEXT _idlehands(SB), 1, $-4
	MOVW	CPSR, R3
//	ORR	$PsrDirq, R3, R1		/* splhi */
	BIC	$PsrDirq, R3, R1		/* spllo */
	MOVW	R1, CPSR

	MOVW	$0, R0				/* wait for interrupt */
	MCR	CpSC, 0, R0, C(CpCACHE), C(CpCACHEintr), CpCACHEwait
	ISB

	MOVW	R3, CPSR			/* splx */
	RET

TEXT barriers(SB), 1, $-4
	BARRIERS
	RET