xref: /plan9/sys/src/9/rb/l.s (revision 51f48f69b4c3e5c9d9f7955d28612ef2d4048ccc)
/*
 * mips 24k machine assist for routerboard rb450g
 */
#include "mem.h"
#include "mips.s"

#define SANITY 0x12345678	/* expected value of sanity(SB); checked at boot */

	NOSCHED			/* tell the assembler not to reorder/schedule instructions */
/*
 * Boot only processor.
 * Entered from the boot rom.  Establishes SB, sanity-checks the
 * data segment, quiets timer/watchdog, cleans the caches, sets
 * kseg0 cachability, zeroes Mach and bss, then calls main().
 * If main() ever returns, jump back to the rom.
 */
TEXT	start(SB), $-4
	MOVW	$setR30(SB), R30	/* establish static base for globals */

PUTC('9', R1, R2)			/* first sign of life on the console */
	DI(0)				/* interrupts off */

	/* verify the data segment is where we expect; else complain */
	MOVW	sanity(SB), R1
	CONST(SANITY, R2)
	SUBU	R1, R2, R2
	BNE	R2, insane
	NOP				/* branch delay slot */

	MOVW	R0, M(COMPARE)		/* cancel any pending timer interrupt */
	EHB

	/* don't enable any interrupts nor FP, but leave BEV on. */
	MOVW	$BEV,R1
	MOVW	R1, M(STATUS)
	UBARRIERS(7, R7, stshb)		/* returns to kseg1 space */
	MOVW	R0, M(CAUSE)
	EHB

	/* silence the atheros watchdog */
	MOVW	$(KSEG1|0x18060008), R1	/* NOTE(review): presumably the ar71xx watchdog ctl reg — confirm */
	MOVW	R0, (R1)			/* set no action */
	SYNC

	MOVW	$PE, R1
	MOVW	R1, M(CACHEECC)		/* aka ErrCtl */
	EHB
	JAL	cleancache(SB)
	NOP

	MOVW	$TLBROFF, R1
	MOVW	R1, M(WIRED)		/* reserve low tlb entries as wired */

	MOVW	R0, M(CONTEXT)
	EHB

	/* set KSEG0 cachability before trying LL/SC in lock code */
	MOVW	M(CONFIG), R1
	AND	$~CFG_K0, R1
	/* make kseg0 cachable, enable write-through merging */
	OR	$((PTECACHABILITY>>3)|CFG_MM), R1
	MOVW	R1, M(CONFIG)
	BARRIERS(7, R7, cfghb)			/* back to kseg0 space */

	MOVW	$setR30(SB), R30		/* again */

	/* initialize Mach, including stack */
	MOVW	$MACHADDR, R(MACH)
	ADDU	$(MACHSIZE-BY2V), R(MACH), SP
	MOVW	R(MACH), R1
clrmach:					/* zero Mach word-by-word up to SP */
	MOVW	R0, (R1)
	ADDU	$BY2WD, R1
	BNE	R1, SP, clrmach
	NOP
	MOVW	R0, 0(R(MACH))			/* m->machno = 0 */
	MOVW	R0, R(USER)			/* up = nil */

	/* zero bss, byte-by-byte */
	MOVW	$edata(SB), R1
	MOVW	$end(SB), R2
clrbss:
	MOVB	R0, (R1)
	ADDU	$1, R1
	BNE	R1, R2, clrbss
	NOP

	/* poison the callee-saved registers to catch misuse */
	MOVW	$0x16, R16
	MOVW	$0x17, R17
	MOVW	$0x18, R18
	MOVW	$0x19, R19
	MOVW	$0x20, R20
	MOVW	$0x21, R21
	MOVW	$0x22, R22
	MOVW	$0x23, R23

	MOVW	R0, HI
	MOVW	R0, LO

PUTC('\r', R1, R2)
PUTC('\n', R1, R2)
	JAL	main(SB)
	NOP
	CONST(ROM, R1)			/* main returned: give up */
	JMP	(R1)			/* back to the rom */
102
/* console output and busy-wait helpers for the `insane' path below */
#define PUT(c) PUTC(c, R1, R2)
/* spin 34,000,000 iterations (clobbers R3); rough pacing between messages */
#define DELAY(lab) \
	CONST(34000000, R3); \
lab:	SUBU	$1, R3; \
	BNE	R3, lab; \
	NOP
109
insane:
	/*
	 * data segment is misaligned; kernel needs vl -R4096 or -R16384,
	 * as appropriate, for reboot.
	 * spell out "?data segment misaligned" slowly (PUT clobbers R1, R2),
	 * then return to the rom.
	 */
	PUT('?'); PUT('d'); PUT('a'); PUT('t'); PUT('a'); PUT(' '); DELAY(dl1)
	PUT('s'); PUT('e'); PUT('g'); PUT('m'); PUT('e'); PUT('n'); DELAY(dl2)
	PUT('t'); PUT(' '); PUT('m'); PUT('i'); PUT('s'); PUT('a'); DELAY(dl3)
	PUT('l'); PUT('i'); PUT('g'); PUT('n'); PUT('e'); PUT('d'); DELAY(dl4)
	PUT('\r'); PUT('\n'); DELAY(dl5)
	CONST(ROM, R1)
	JMP	(R1)			/* back to the rom */
	NOP
123
/*
 * target for JALRHB in BARRIERS.
 * presumably the BARRIERS macro leaves the resume address in R22 — see mips.s.
 */
TEXT ret(SB), $-4
	JMP	(R22)
	NOP
128
/*
 * print R1 in hex on the console; clobbers R3-R8.
 * walks nibbles from most- to least-significant.
 */
TEXT printhex(SB), $-4
	MOVW	$32, R5			/* R5 = shift amount + 4 */
	MOVW	$9, R7			/* digit/letter threshold */
prtop:
	SUB	$4, R5
	MOVW	R1, R6
	SRL	R5, R6			/* current nibble into low bits */
	AND	$0xf, R6
	SGTU	R6, R7, R8		/* R8 = (nibble > 9) */
	BEQ	R8, prdec		/* branch if R6 <= 9 */
	NOP
	ADD	$('a'-10), R6		/* a-f */
	JMP	prchar
	NOP
prdec:
	ADD	$'0', R6		/* 0-9 */
prchar:
	PUTC(R6, R3, R4)
	BNE	R5, prtop		/* loop until shift count reaches 0 */
	NOP
	RETURN
151
/*
 * Take first processor into user mode
 * 	- argument is stack pointer to user
 * Loads EPC with the user entry point (just past the a.out header at
 * UTZERO), flips STATUS to user mode with interrupts enabled, and ERETs.
 */
TEXT	touser(SB), $-4
	MOVW	R1, SP			/* switch to the user stack */
	MOVW	$(UTZERO+32), R2	/* header appears in text */
	MOVW	R2, M(EPC)
	EHB
	MOVW	M(STATUS), R4
	AND	$(~KMODEMASK), R4
	OR	$(KUSER|IE|EXL), R4	/* switch to user mode, intrs on, exc */
	MOVW	R4, M(STATUS)		/* " */
	ERET				/* clears EXL */
166
/*
 * manipulate interrupts
 */

/* enable an interrupt; bit is in R1.  ors the bit into STATUS. */
TEXT	intron(SB), $0
	MOVW	M(STATUS), R2
	OR	R1, R2
	MOVW	R2, M(STATUS)
	EHB				/* clear the mtc0 hazard */
	RETURN

/* disable an interrupt; bit is in R1.  clears the bit in STATUS. */
TEXT	introff(SB), $0
	MOVW	M(STATUS), R2
	XOR	$-1, R1			/* R1 = ~R1 */
	AND	R1, R2
	MOVW	R2, M(STATUS)
	EHB
	RETURN
187
/* on our 24k, wait instructions are not interruptible, alas. */
/*
 * idle: enable interrupts, wait for one, then restore the previous
 * interrupt state.  the kernel-trap return path (waskernel, below)
 * knows to advance a saved pc that points at wait's WAIT.
 */
TEXT	idle(SB), $-4
	EI(1)				/* old M(STATUS) into R1 */
	EHB
	/* fall through */

TEXT	wait(SB), $-4
	WAIT
	NOP

	MOVW	R1, M(STATUS)		/* interrupts restored */
	EHB
	RETURN
201
/* raise priority: disable interrupts; return old STATUS in R1 */
TEXT	splhi(SB), $0
	EHB
	MOVW	R31, 12(R(MACH))	/* save PC in m->splpc */
	DI(1)				/* old M(STATUS) into R1 */
	EHB
	RETURN

/* restore priority: copy IE bit from R1 (a previous spl return) into STATUS */
TEXT	splx(SB), $0
	EHB
	MOVW	R31, 12(R(MACH))	/* save PC in m->splpc */
	MOVW	M(STATUS), R2
	AND	$IE, R1			/* keep only caller's IE bit */
	AND	$~IE, R2		/* current STATUS without IE */
	OR	R2, R1
	MOVW	R1, M(STATUS)
	EHB
	RETURN

/* lower priority: enable interrupts; return old STATUS in R1 */
TEXT	spllo(SB), $0
	EHB
	EI(1)				/* old M(STATUS) into R1 */
	EHB
	RETURN

TEXT	spldone(SB), $0
	RETURN

/* true (non-zero) iff interrupts are currently enabled */
TEXT	islo(SB), $0
	MOVW	M(STATUS), R1
	AND	$IE, R1
	RETURN

/* full memory & pipeline barrier */
TEXT	coherence(SB), $-4
	BARRIERS(7, R7, cohhb)
	SYNC
	EHB
	RETURN
239
/*
 * process switching.
 * a label is {sp, pc}; setlabel returns 0, gotolabel resumes at the
 * saved point returning 1 (setjmp/longjmp style).
 */

TEXT	setlabel(SB), $-4
	MOVW	R29, 0(R1)		/* label->sp = SP */
	MOVW	R31, 4(R1)		/* label->pc = return address */
	MOVW	R0, R1			/* return 0 */
	RETURN

TEXT	gotolabel(SB), $-4
	MOVW	0(R1), R29		/* SP = label->sp */
	MOVW	4(R1), R31		/* return address = label->pc */
	MOVW	$1, R1			/* appear to return 1 from setlabel */
	RETURN
255
/*
 * the tlb routines need to be called at splhi.
 */

/*
 * install a tlb entry for virt: rewrite a matching entry if the probe
 * finds one, otherwise write a random slot — but only if the new entry
 * is valid (an invalid entry with no existing match is a no-op).
 */
TEXT	puttlb(SB), $0			/* puttlb(virt, phys0, phys1) */
	EHB
	MOVW	R1, M(TLBVIRT)
	EHB
	MOVW	4(FP), R2		/* phys0 */
	MOVW	8(FP), R3		/* phys1 */
	MOVW	R2, M(TLBPHYS0)
	EHB
	MOVW	$PGSZ, R1
	MOVW	R3, M(TLBPHYS1)
	EHB
	MOVW	R1, M(PAGEMASK)
	OR	R2, R3, R4		/* MTC0 delay slot */
	AND	$PTEVALID, R4		/* MTC0 delay slot */
	EHB
	TLBP				/* tlb probe */
	EHB
	MOVW	M(INDEX), R1
	BGEZ	R1, index		/* if tlb entry found, use it */
	NOP
	BEQ	R4, dont		/* not valid? cf. kunmap */
	NOP
	MOVW	M(RANDOM), R1		/* write random tlb entry */
	MOVW	R1, M(INDEX)
index:
	EHB
	TLBWI				/* write indexed tlb entry */
	JRHB(31)			/* return and clear all hazards */
dont:
	RETURN
290
/* simple accessors for the tlb Wired, Random and PageMask CP0 registers */

TEXT	getwired(SB),$0
	MOVW	M(WIRED), R1
	RETURN

TEXT	setwired(SB),$0
	MOVW	R1, M(WIRED)
	EHB
	RETURN

TEXT	getrandom(SB),$0
	MOVW	M(RANDOM), R1
	RETURN

TEXT	getpagemask(SB),$0
	MOVW	M(PAGEMASK), R1
	RETURN

TEXT	setpagemask(SB),$0
	EHB
	MOVW	R1, M(PAGEMASK)
	EHB
	MOVW	R0, R1			/* prevent accidents */
	RETURN
314
/* write the tlb entry at a specific index; call at splhi */
TEXT	puttlbx(SB), $0	/* puttlbx(index, virt, phys0, phys1, pagemask) */
	MOVW	4(FP), R2		/* virt */
	MOVW	8(FP), R3		/* phys0 */
	MOVW	12(FP), R4		/* phys1 */
	MOVW	16(FP), R5		/* pagemask */
	EHB
	MOVW	R2, M(TLBVIRT)
	EHB
	MOVW	R3, M(TLBPHYS0)
	MOVW	R4, M(TLBPHYS1)
	MOVW	R5, M(PAGEMASK)
	EHB
	MOVW	R1, M(INDEX)
	EHB
	TLBWI				/* write indexed tlb entry */
	JRHB(31)			/* return and clear all hazards */
331
/* return M(TLBVIRT) (current page & asid) */
TEXT	tlbvirt(SB), $0
	EHB
	MOVW	M(TLBVIRT), R1
	EHB
	RETURN
337
/*
 * read tlb entry at index into *entry as {virt, phys0, phys1}.
 * TLBR overwrites TLBVIRT, so the current asid is saved in R10
 * and restored before returning.  call at splhi.
 */
TEXT	gettlbx(SB), $0			/* gettlbx(index, &entry) */
	MOVW	4(FP), R5
	MOVW	M(TLBVIRT), R10		/* save our asid */
	EHB
	MOVW	R1, M(INDEX)
	EHB
	TLBR				/* read indexed tlb entry */
	EHB
	MOVW	M(TLBVIRT), R2
	MOVW	M(TLBPHYS0), R3
	MOVW	M(TLBPHYS1), R4
	MOVW	R2, 0(R5)
	MOVW	R3, 4(R5)
	MIPS24KNOP
	MOVW	R4, 8(R5)
	EHB
	MOVW	R10, M(TLBVIRT)		/* restore our asid */
	EHB
	RETURN
357
/*
 * probe for tlbvirt; if found, copy the entry into
 * *entry as {virt, phys0, phys1, pagemask}.
 * returns M(INDEX) in R1 (negative if no match).  call at splhi.
 */
TEXT	gettlbp(SB), $0			/* gettlbp(tlbvirt, &entry) */
	MOVW	4(FP), R5
	MOVW	M(TLBVIRT), R10		/* save our asid */
	EHB
	MOVW	R1, M(TLBVIRT)
	EHB
	TLBP				/* probe tlb */
	EHB
	MOVW	M(INDEX), R1
	BLTZ	R1, gettlbp1		/* if no tlb entry found, return */
	NOP
	EHB
	TLBR				/* read indexed tlb entry */
	EHB
	MOVW	M(TLBVIRT), R2
	MOVW	M(TLBPHYS0), R3
	MOVW	M(TLBPHYS1), R4
	MOVW	M(PAGEMASK), R6
	MOVW	R2, 0(R5)
	MOVW	R3, 4(R5)
	MIPS24KNOP
	MOVW	R4, 8(R5)
	MOVW	R6, 12(R5)
gettlbp1:
	EHB
	MOVW	R10, M(TLBVIRT)		/* restore our asid */
	EHB
	RETURN
386
/* return the virt/asid word of the tlb entry at index; call at splhi */
TEXT	gettlbvirt(SB), $0		/* gettlbvirt(index) */
	MOVW	M(TLBVIRT), R10		/* save our asid */
	EHB
	MOVW	R1, M(INDEX)
	EHB
	TLBR				/* read indexed tlb entry */
	EHB
	MOVW	M(TLBVIRT), R1
	EHB
	MOVW	R10, M(TLBVIRT)		/* restore our asid */
	EHB
	RETURN
399
/*
 * exceptions.
 * mips promises that there will be no current hazards upon entry
 * to exception handlers.
 */

/* utlb-refill vector: trampoline to utlbmiss via R26 (kernel scratch reg) */
TEXT	vector0(SB), $-4
	MOVW	$utlbmiss(SB), R26
	JMP	(R26)
	NOP
410
/*
 * compute stlb hash index.
 * must match index calculation in mmu.c/putstlb()
 *
 * M(TLBVIRT) [page & asid] in arg, result in arg.
 * stir in swizzled asid; we get best results with asid in both high & low bits.
 *
 * page = tlbvirt >> (PGSHIFT+1);	// ignoring even/odd bit
 * R27 = ((tlbvirt<<(STLBLOG-8) ^ (uchar)tlbvirt ^ page ^
 *	((page & (MASK(HIPFNBITS) << STLBLOG)) >> HIPFNBITS)) &
 *	(STLBSIZE-1)) * 12;
 *
 * (the *12 scaling is done by the caller, not here.)
 */
#define STLBHASH(arg, tmp, tmp2) \
	MOVW	arg, tmp2; \
	SRL	$(PGSHIFT+1), arg;	/* move low page # bits to low bits */ \
	CONST	((MASK(HIPFNBITS) << STLBLOG), tmp); \
	AND	arg, tmp;		/* extract high page # bits */ \
	SRL	$HIPFNBITS, tmp;	/* position them */ \
	XOR	tmp, arg;		/* include them */ \
	MOVW	tmp2, tmp;		/* asid in low byte */ \
	SLL	$(STLBLOG-8), tmp;	/* move asid to high bits */ \
	XOR	tmp, arg;		/* include asid in high bits too */ \
	AND	$0xff, tmp2, tmp;	/* asid in low byte */ \
	XOR	tmp, arg;		/* include asid in low bits */ \
	CONST	(STLBSIZE-1, tmp); \
	AND	tmp, arg		/* chop to fit */
437
/*
 * fast utlb-refill handler: look the faulting page pair up in the
 * software tlb (m->stb) and install it; if it isn't cached there,
 * fall into the general exception path (trap.c) via stlbm.
 * runs entirely in R23 (saved in DESAVE), R26 and R27.
 */
TEXT	utlbmiss(SB), $-4
	/*
	 * don't use R28 by using constants that span both word halves,
	 * it's unsaved so far.  avoid R24 (up in kernel) and R25 (m in kernel).
	 */
	/* update statistics */
	CONST	(MACHADDR, R26)		/* R26 = m-> */
	MOVW	16(R26), R27
	ADDU	$1, R27
	MOVW	R27, 16(R26)		/* m->tlbfault++ */

	MOVW	R23, M(DESAVE)		/* save R23 */

#ifdef	KUTLBSTATS
	/* split the count into user vs kernel misses */
	MOVW	M(STATUS), R23
	AND	$KUSER, R23
	BEQ	R23, kmiss

	MOVW	24(R26), R27
	ADDU	$1, R27
	MOVW	R27, 24(R26)		/* m->utlbfault++ */
	JMP	either
kmiss:
	MOVW	20(R26), R27
	ADDU	$1, R27
	MOVW	R27, 20(R26)		/* m->ktlbfault++ */
either:
#endif

	/* compute stlb index */
	EHB
	MOVW	M(TLBVIRT), R27		/* asid in low byte */
	STLBHASH(R27, R26, R23)
	MOVW	M(DESAVE), R23		/* restore R23 */

	/* scale to a byte index (multiply by 12, the stlb entry size) */
	SLL	$1, R27, R26		/* × 2 */
	ADDU	R26, R27		/* × 3 */
	SLL	$2, R27			/* × 12 */

	CONST	(MACHADDR, R26)		/* R26 = m-> */
	MOVW	4(R26), R26		/* R26 = m->stb */
	ADDU	R26, R27		/* R27 = &m->stb[hash] */

	MOVW	M(BADVADDR), R26
	AND	$BY2PG, R26
	BNE	R26, utlbodd		/* odd page? */
	NOP

utlbeven:
	MOVW	4(R27), R26		/* R26 = m->stb[hash].phys0 */
	BEQ	R26, stlbm		/* nothing cached? do it the hard way */
	NOP
	MOVW	R26, M(TLBPHYS0)
	EHB
	MOVW	8(R27), R26		/* R26 = m->stb[hash].phys1 */
	JMP	utlbcom
	MOVW	R26, M(TLBPHYS1)	/* branch delay slot */

utlbodd:
	MOVW	8(R27), R26		/* R26 = m->stb[hash].phys1 */
	BEQ	R26, stlbm		/* nothing cached? do it the hard way */
	NOP
	MOVW	R26, M(TLBPHYS1)
	EHB
	MOVW	4(R27), R26		/* R26 = m->stb[hash].phys0 */
	MOVW	R26, M(TLBPHYS0)

utlbcom:
	EHB				/* MTC0/MFC0 hazard */
	MOVW	M(TLBVIRT), R26
	MOVW	(R27), R27		/* R27 = m->stb[hash].virt */
	BEQ	R27, stlbm		/* nothing cached? do it the hard way */
	NOP
	/* is the stlb entry for the right virtual address? */
	BNE	R26, R27, stlbm		/* M(TLBVIRT) != m->stb[hash].virt? */
	NOP

	/* if an entry exists, overwrite it, else write a random one */
	CONST	(PGSZ, R27)
	MOVW	R27, M(PAGEMASK)	/* select page size */
	EHB
	TLBP				/* probe tlb */
	EHB
	MOVW	M(INDEX), R26
	BGEZ	R26, utlindex		/* if tlb entry found, rewrite it */
	EHB				/* delay slot */
	TLBWR				/* else write random tlb entry */
	ERET
utlindex:
	TLBWI				/* write indexed tlb entry */
	ERET

/* not in the stlb either; make trap.c figure it out */
stlbm:
	MOVW	$exception(SB), R26
	JMP	(R26)
	NOP
536
/* C-callable wrapper: stlbhash(tlbvirt) — returns STLBHASH of R1 */
TEXT	stlbhash(SB), $-4
	STLBHASH(R1, R2, R3)
	RETURN
540
/* cache-error and general exception vectors: trampoline to exception */
TEXT	vector100(SB), $-4
	MOVW	$exception(SB), R26
	JMP	(R26)
	NOP

TEXT	vector180(SB), $-4
	MOVW	$exception(SB), R26
	JMP	(R26)
	NOP
550
/*
 * common exception handling.
 * dispatch on user vs kernel mode; user-mode exceptions switch to the
 * process kernel stack, save state in a Ureg, and call syscall() or
 * trap().  savereg1 expects old STATUS in R26 and old SP in R27, and
 * returns with M(CAUSE) in R26.
 */
TEXT	exception(SB), $-4
	MOVW	M(STATUS), R26
	AND	$KUSER, R26, R27
	BEQ	R27, waskernel
	MOVW	SP, R27			/* delay slot */

wasuser:
	CONST	(MACHADDR, SP)		/*  m-> */
	MOVW	8(SP), SP		/*  m->proc */
	MOVW	8(SP), SP		/*  m->proc->kstack */
	MOVW	M(STATUS), R26		/* redundant load */
	ADDU	$(KSTACK-UREGSIZE), SP
	MOVW	R31, Ureg_r31(SP)

	JAL	savereg1(SB)
	NOP

	MOVW	R30, Ureg_r30(SP)
	MOVW	R(MACH), Ureg_r25(SP)
	MIPS24KNOP
	MOVW	R(USER), Ureg_r24(SP)

	MOVW	$setR30(SB), R30
	CONST	(MACHADDR, R(MACH))		/* R(MACH) = m-> */
	MOVW	8(R(MACH)), R(USER)		/* up = m->proc */

	/* R26 = M(CAUSE) here; extract the exception code */
	AND	$(EXCMASK<<2), R26, R1
	SUBU	$(CSYS<<2), R1
	BNE	R1, notsys
	NOP

	/* the carrera does this: */
//	ADDU	$8, SP, R1			/* first arg for syscall */

	MOVW	SP, R1				/* first arg for syscall */
	JAL	syscall(SB)
	SUBU	$Notuoffset, SP			/* delay slot */
sysrestore:
	JAL	restreg1(SB)
	ADDU	$Notuoffset, SP			/* delay slot */

	MOVW	Ureg_r31(SP), R31
	MOVW	Ureg_status(SP), R26
	MOVW	Ureg_r30(SP), R30
	MOVW	R26, M(STATUS)
	EHB
	MOVW	Ureg_pc(SP), R26		/* old pc */
	MOVW	Ureg_sp(SP), SP
	MOVW	R26, M(EPC)
	ERET
601
/* not a system call: save the rest of the registers and call trap() */
notsys:
	JAL	savereg2(SB)
	NOP

	/* the carrera does this: */
//	ADDU	$8, SP, R1			/* first arg for trap */

	MOVW	SP, R1				/* first arg for trap */
	JAL	trap(SB)
	SUBU	$Notuoffset, SP			/* delay slot */

	ADDU	$Notuoffset, SP

restore:
	JAL	restreg1(SB)
	NOP
	JAL	restreg2(SB)		/* restores R28, among others */
	NOP

	/* restreg2 leaves Ureg_pc in R26 */
	MOVW	Ureg_r30(SP), R30
	MOVW	Ureg_r31(SP), R31
	MOVW	Ureg_r25(SP), R(MACH)
	MOVW	Ureg_r24(SP), R(USER)
	MOVW	Ureg_sp(SP), SP
	MOVW	R26, M(EPC)
	ERET
628
/* exception taken in kernel mode: build a Ureg on the current stack */
waskernel:
	SUBU	$UREGSIZE, SP
	OR	$7, SP				/* conservative rounding */
	XOR	$7, SP				/* round SP down to 8-byte boundary */
	MOVW	R31, Ureg_r31(SP)

	JAL	savereg1(SB)
	NOP
	JAL	savereg2(SB)
	NOP

	/* the carrera does this: */
//	ADDU	$8, SP, R1			/* first arg for trap */

	MOVW	SP, R1			/* first arg for trap */
	JAL	trap(SB)
	SUBU	$Notuoffset, SP			/* delay slot */

	ADDU	$Notuoffset, SP

	JAL	restreg1(SB)
	NOP

	/*
	 * if about to return to `wait', interrupt arrived just before
	 * executing wait, so move saved pc past it.
	 */
	MOVW	Ureg_pc(SP), R26
	MOVW	R26, R31
	MOVW	$wait(SB), R1
	SUBU	R1, R31
	BNE	R31, notwait
	NOP
	ADD	$BY2WD, R26		/* advance saved pc */
	MOVW	R26, Ureg_pc(SP)
notwait:
	JAL	restreg2(SB)		/* restores R28, among others */
	NOP

	/* restreg2 leaves Ureg_pc in R26 */
	MOVW	Ureg_r31(SP), R31
	MOVW	Ureg_sp(SP), SP
	MOVW	R26, M(EPC)
	ERET
672
/* return path for a newly-forked process: take the syscall return path */
TEXT	forkret(SB), $0
	JMP	sysrestore
	MOVW	R0, R1			/* delay slot; child returns 0 */
676
/*
 * save mandatory registers.
 * called with old M(STATUS) in R26.
 * called with old SP in R27
 * returns with M(CAUSE) in R26
 * also drops the cpu out of user mode / exception level via STATUS.
 */
TEXT	savereg1(SB), $-4
	MOVW	R1, Ureg_r1(SP)

	MOVW	$(~KMODEMASK),R1	/* don't use R28, it's unsaved so far */
	AND	R26, R1
	MOVW	R1, M(STATUS)
	EHB

	MOVW	R26, Ureg_status(SP)	/* status */
	MOVW	R27, Ureg_sp(SP)	/* user SP */

	MOVW	M(EPC), R1
	MOVW	M(CAUSE), R26

	MOVW	R23, Ureg_r23(SP)
	MOVW	R22, Ureg_r22(SP)
	MIPS24KNOP
	MOVW	R21, Ureg_r21(SP)
	MOVW	R20, Ureg_r20(SP)
	MIPS24KNOP
	MOVW	R19, Ureg_r19(SP)
	MOVW	R1, Ureg_pc(SP)
	RETURN
706
/*
 * all other registers.
 * called with M(CAUSE) in R26
 * (MIPS24KNOPs break up back-to-back stores for the 24k pipeline.)
 */
TEXT	savereg2(SB), $-4
	MOVW	R2, Ureg_r2(SP)

	MOVW	M(BADVADDR), R2
	MOVW	R26, Ureg_cause(SP)
	MOVW	M(TLBVIRT), R1
	MOVW	R2, Ureg_badvaddr(SP)
	MOVW	R1, Ureg_tlbvirt(SP)
	MOVW	HI, R1
	MOVW	LO, R2
	MOVW	R1, Ureg_hi(SP)
	MOVW	R2, Ureg_lo(SP)
	MIPS24KNOP
					/* LINK,SB,SP missing */
	MOVW	R28, Ureg_r28(SP)
					/* R27, R26 not saved */
					/* R25, R24 missing */
					/* R23- R19 saved in save1 */
	MOVW	R18, Ureg_r18(SP)
	MIPS24KNOP
	MOVW	R17, Ureg_r17(SP)
	MOVW	R16, Ureg_r16(SP)
	MIPS24KNOP
	MOVW	R15, Ureg_r15(SP)
	MOVW	R14, Ureg_r14(SP)
	MIPS24KNOP
	MOVW	R13, Ureg_r13(SP)
	MOVW	R12, Ureg_r12(SP)
	MIPS24KNOP
	MOVW	R11, Ureg_r11(SP)
	MOVW	R10, Ureg_r10(SP)
	MIPS24KNOP
	MOVW	R9, Ureg_r9(SP)
	MOVW	R8, Ureg_r8(SP)
	MIPS24KNOP
	MOVW	R7, Ureg_r7(SP)
	MOVW	R6, Ureg_r6(SP)
	MIPS24KNOP
	MOVW	R5, Ureg_r5(SP)
	MOVW	R4, Ureg_r4(SP)
	MIPS24KNOP
	MOVW	R3, Ureg_r3(SP)
	RETURN
754
/* restore the registers saved by savereg1 (R19-R23) */
TEXT	restreg1(SB), $-4
	MOVW	Ureg_r23(SP), R23
	MOVW	Ureg_r22(SP), R22
	MOVW	Ureg_r21(SP), R21
	MOVW	Ureg_r20(SP), R20
	MOVW	Ureg_r19(SP), R19
	RETURN
762
/*
 * restore the registers saved by savereg2, plus STATUS.
 * returns with the saved Ureg pc in R26 for the caller to load into EPC.
 */
TEXT	restreg2(SB), $-4
					/* LINK,SB,SP missing */
	MOVW	Ureg_r28(SP), R28
					/* R27, R26 not saved */
					/* R25, R24 missing */
					/* R19- R23 restored in rest1 */
	MOVW	Ureg_r18(SP), R18
	MOVW	Ureg_r17(SP), R17
	MOVW	Ureg_r16(SP), R16
	MOVW	Ureg_r15(SP), R15
	MOVW	Ureg_r14(SP), R14
	MOVW	Ureg_r13(SP), R13
	MOVW	Ureg_r12(SP), R12
	MOVW	Ureg_r11(SP), R11
	MOVW	Ureg_r10(SP), R10
	MOVW	Ureg_r9(SP), R9
	MOVW	Ureg_r8(SP), R8
	MOVW	Ureg_r7(SP), R7
	MOVW	Ureg_r6(SP), R6
	MOVW	Ureg_r5(SP), R5
	MOVW	Ureg_r4(SP), R4
	MOVW	Ureg_r3(SP), R3
	MOVW	Ureg_lo(SP), R2
	MOVW	Ureg_hi(SP), R1
	MOVW	R2, LO
	MOVW	R1, HI

	MOVW	Ureg_status(SP), R1
	MOVW	Ureg_r2(SP), R2
	MOVW	R1, M(STATUS)		/* could change interruptibility */
	EHB
	MOVW	Ureg_r1(SP), R1	/* BOTCH */
	MOVW	Ureg_pc(SP), R26
	RETURN
797
#ifdef OLD_MIPS_EXAMPLE
/* this appears to be a dreg from the distant past; compiled out */
TEXT	rfnote(SB), $0
	MOVW	R1, R26			/* 1st arg is &uregpointer */
	JMP	restore
	SUBU	$(BY2WD), R26, SP	/* delay slot: pc hole */
#endif
805
/*
 * degenerate floating-point stuff: no FPU support on this port,
 * so these are all stubs.
 */

TEXT	clrfpintr(SB), $0
	RETURN

TEXT	savefpregs(SB), $0
	RETURN

TEXT	restfpregs(SB), $0
	RETURN

TEXT	fcr31(SB), $0			/* fp csr */
	MOVW	R0, R1
	RETURN
822
/*
 * Emulate 68020 test and set: load linked / store conditional.
 * returns the old value of *key in R1 and atomically stores 1 into it.
 */

TEXT	tas(SB), $0
	MOVW	R1, R2		/* address of key */
tas1:
	MOVW	$1, R3
	LL(2, 1)		/* R1 = *key (linked) */
	NOP
	SC(2, 3)		/* *key = 1 if still linked; R3 = success */
	NOP
	BEQ	R3, tas1	/* retry if the store conditional failed */
	NOP
	RETURN
838
/*
 * atomically increment *counter; return the new value.
 * NOTE(review): unlike _xdec below, there is no leading SYNC here —
 * presumably intentional, but worth confirming.
 */
TEXT	_xinc(SB), $0
	MOVW	R1, R2		/* address of counter */
loop:
	MOVW	$1, R3
	LL(2, 1)		/* R1 = *counter (linked) */
	NOP
	ADDU	R1, R3		/* R3 = *counter + 1 */
	MOVW	R3, R1		/* return new value */
	SC(2, 3)		/* *counter = R3 if still linked; R3 = success */
	NOP
	BEQ	R3, loop	/* retry on SC failure */
	NOP
	RETURN
852
/* atomically decrement *counter; return the new value */
TEXT	_xdec(SB), $0
	SYNC
	MOVW	R1, R2		/* address of counter */
loop1:
	MOVW	$-1, R3
	LL(2, 1)		/* R1 = *counter (linked) */
	NOP
	ADDU	R1, R3		/* R3 = *counter - 1 */
	MOVW	R3, R1		/* return new value */
	SC(2, 3)		/* *counter = R3 if still linked; R3 = success */
	NOP
	BEQ	R3, loop1	/* retry on SC failure */
	NOP
	RETURN
867
/*
 * used by the semaphore implementation.
 * cmpswap(key, old, new): if *key == old, set *key = new and return
 * non-zero (SC success flag); otherwise return 0.
 */
TEXT cmpswap(SB), $0
	MOVW	R1, R2		/* address of key */
	MOVW	old+4(FP), R3	/* old value */
	MOVW	new+8(FP), R4	/* new value */
	LL(2, 1)		/* R1 = (R2) */
	NOP
	BNE	R1, R3, fail	/* current value != old: fail */
	NOP
	MOVW	R4, R1
	SC(2, 1)	/* (R2) = R1 if (R2) hasn't changed; R1 = success */
	NOP
	RETURN
fail:
	MOVW	R0, R1
	RETURN
884
/*
 *  cache manipulation
 */

/*
 *  we avoided using R4, R5, R6, and R7 so gotopc can call us without saving
 *  them, but gotopc is now gone.
 */
/* invalidate [virtaddr, virtaddr+count) in the instruction cache */
TEXT	icflush(SB), $-4			/* icflush(virtaddr, count) */
	MOVW	4(FP), R9
	DI(10)				/* intrs off, old status -> R10 */
	UBARRIERS(7, R7, ichb);		/* return to kseg1 (uncached) */
	ADDU	R1, R9			/* R9 = last address */
	MOVW	$(~(CACHELINESZ-1)), R8
	AND	R1, R8			/* R8 = first address, rounded down */
	ADDU	$(CACHELINESZ-1), R9
	AND	$(~(CACHELINESZ-1)), R9	/* round last address up */
	SUBU	R8, R9			/* R9 = revised count */
icflush1:
//	CACHE	PD+HWB, (R8)		/* flush D to ram */
	CACHE	PI+HINV, (R8)		/* invalidate in I */
	SUBU	$CACHELINESZ, R9
	BGTZ	R9, icflush1
	ADDU	$CACHELINESZ, R8	/* delay slot */

	BARRIERS(7, R7, ic2hb);		/* return to kseg0 (cached) */
	MOVW	R10, M(STATUS)		/* restore interrupt state */
	JRHB(31)			/* return and clear all hazards */
913
/* write back & invalidate [virtaddr, virtaddr+count) in the data cache */
TEXT	dcflush(SB), $-4			/* dcflush(virtaddr, count) */
	MOVW	4(FP), R9
	DI(10)				/* intrs off, old status -> R10 */
	SYNC
	EHB
	ADDU	R1, R9			/* R9 = last address */
	MOVW	$(~(CACHELINESZ-1)), R8
	AND	R1, R8			/* R8 = first address, rounded down */
	ADDU	$(CACHELINESZ-1), R9
	AND	$(~(CACHELINESZ-1)), R9	/* round last address up */
	SUBU	R8, R9			/* R9 = revised count */
dcflush1:
//	CACHE	PI+HINV, (R8)		/* invalidate in I */
	CACHE	PD+HWBI, (R8)		/* flush & invalidate in D */
	SUBU	$CACHELINESZ, R9
	BGTZ	R9, dcflush1
	ADDU	$CACHELINESZ, R8	/* delay slot */
	SYNC
	EHB
	MOVW	R10, M(STATUS)		/* restore interrupt state */
	JRHB(31)			/* return and clear all hazards */
934	JRHB(31)			/* return and clear all hazards */
935
936/* the i and d caches may be different sizes, so clean them separately */
937TEXT	cleancache(SB), $-4
938	DI(10)				/* intrs off, old status -> R10 */
939
940	UBARRIERS(7, R7, cchb);		/* return to kseg1 (uncached) */
941	MOVW	R0, R1			/* index, not address */
942	MOVW	$ICACHESIZE, R9
943iccache:
944	CACHE	PI+IWBI, (R1)		/* flush & invalidate I by index */
945	SUBU	$CACHELINESZ, R9
946	BGTZ	R9, iccache
947	ADDU	$CACHELINESZ, R1	/* delay slot */
948
949	BARRIERS(7, R7, cc2hb);		/* return to kseg0 (cached) */
950
951	MOVW	R0, R1			/* index, not address */
952	MOVW	$DCACHESIZE, R9
953dccache:
954	CACHE	PD+IWBI, (R1)		/* flush & invalidate D by index */
955	SUBU	$CACHELINESZ, R9
956	BGTZ	R9, dccache
957	ADDU	$CACHELINESZ, R1	/* delay slot */
958
959	SYNC
960	MOVW	R10, M(STATUS)
961	JRHB(31)			/* return and clear all hazards */
962
/*
 * access to CP0 registers: thin C-callable get/set wrappers.
 * the MFC0(reg, sel, rt) forms read the numbered select of a register.
 */

TEXT	prid(SB), $0
	MOVW	M(PRID), R1
	RETURN

TEXT	rdcount(SB), $0
	MOVW	M(COUNT), R1
	RETURN

TEXT	wrcount(SB), $0
	MOVW	R1, M(COUNT)
	EHB
	RETURN

TEXT	wrcompare(SB), $0
	MOVW	R1, M(COMPARE)
	EHB
	RETURN

TEXT	rdcompare(SB), $0
	MOVW	M(COMPARE), R1
	RETURN

TEXT	getconfig(SB), $-4
	MOVW	M(CONFIG), R1
	RETURN

TEXT	getconfig1(SB), $-4
	MFC0(CONFIG, 1, 1)
	RETURN

TEXT	getconfig2(SB), $-4
	MFC0(CONFIG, 2, 1)
	RETURN

TEXT	getconfig3(SB), $-4
	MFC0(CONFIG, 3, 1)
	RETURN

TEXT	getconfig4(SB), $-4
	MFC0(CONFIG, 4, 1)
	RETURN

TEXT	getconfig7(SB), $-4
	MFC0(CONFIG, 7, 1)
	RETURN

TEXT	gethwreg3(SB), $-4
	RDHWR(3, 1)
	RETURN

TEXT	getcause(SB), $-4
	MOVW	M(CAUSE), R1
	RETURN

TEXT	C_fcr0(SB), $-4		/* fp implementation */
	MOVW	$0x500, R1	/* claim to be an r4k, thus have ll/sc */
	RETURN

TEXT	getstatus(SB), $0
	MOVW	M(STATUS), R1
	RETURN

TEXT	setstatus(SB), $0
	MOVW	R1, M(STATUS)
	EHB
	RETURN

TEXT	setwatchhi0(SB), $0
	MOVW	R1, M(WATCHHI)
	EHB
	RETURN

/*
 * beware that the register takes a double-word address, so it's not
 * precise to the individual instruction.
 */
TEXT	setwatchlo0(SB), $0
	MOVW	R1, M(WATCHLO)
	EHB
	RETURN

TEXT	setsp(SB), $-4
	MOVW	R1, SP
	RETURN

TEXT	getintctl(SB), $-4
	MFC0(STATUS, 1, 1)	/* IntCtl */
	RETURN

TEXT	getsrsctl(SB), $-4
	MFC0(STATUS, 2, 1)	/* SRSCtl */
	RETURN

TEXT	getsrsmap(SB), $-4
	MFC0(STATUS, 3, 1)	/* SRSMap */
	RETURN

TEXT	getperfctl0(SB), $-4
	MFC0(PERFCOUNT, 0, 1)	/* perf counter 0 control */
	RETURN

TEXT	getperfctl1(SB), $-4
	MFC0(PERFCOUNT, 2, 1)	/* perf counter 1 control — sel 2; sels pair up as ctl/count */
	RETURN
1071
	/* link-time marker checked at boot to detect a misaligned data segment */
	GLOBL	sanity(SB), $4
	DATA	sanity(SB)/4, $SANITY

	SCHED			/* re-enable assembler instruction scheduling */
1076