/* virtex4 ppc405 machine assist */
#include "ppc405.h"
#include "define.h"

/* special instruction definitions */
#define	BDNZ	BC	16,0,
#define	BDNE	BC	0,2,
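/*
 * BO=16 decrements CTR and branches while CTR != 0;
 * BO=0 with BI=2 also requires CR0[EQ] clear, i.e. decrement
 * and branch while CTR != 0 and the last compare was not-equal.
 */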

#define	TBRL	268	/* read time base lower in MFTB */
#define	TBRU	269	/* read time base upper in MFTB */
#define	MFTB(tbr,d)	WORD	$((31<<26)|((d)<<21)|((tbr&0x1f)<<16)|(((tbr>>5)&0x1f)<<11)|(371<<1))
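/*
 * as with SPR numbers, the 10-bit TBR number is encoded with its
 * 5-bit halves swapped: the low half at bit 16, the high at bit 11.
 * e.g. MFTB(TBRL, 3) is mftb r3 with TBR 268 = 0x10c encoded as
 * low half 0xc, high half 0x8.
 */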

#define	TLBIA		WORD	$((31<<26)|(370<<1))
#define	TLBSYNC		WORD	$((31<<26)|(566<<1))

/* 400 models; perhaps others */
#define	ICCCI(a,b)	WORD	$((31<<26)|((a)<<16)|((b)<<11)|(966<<1))
#define	DCCCI(a,b)	WORD	$((31<<26)|((a)<<16)|((b)<<11)|(454<<1))
/* these follow the source -> dest ordering */
#define	DCREAD(s,t)	WORD	$((31<<26)|((t)<<21)|((s)<<11)|(486<<1))
#define	TLBRELO(a,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|(1<<11)|(946<<1))
#define	TLBREHI(a,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|(0<<11)|(946<<1))
#define	TLBWELO(s,a)	WORD	$((31<<26)|((s)<<21)|((a)<<16)|(1<<11)|(978<<1))
#define	TLBWEHI(s,a)	WORD	$((31<<26)|((s)<<21)|((a)<<16)|(0<<11)|(978<<1))
#define	TLBSXF(a,b,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|((b)<<11)|(914<<1))
#define	TLBSXCC(a,b,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|((b)<<11)|(914<<1)|1)
#define	WRTMSR_EE(s)	WORD	$((31<<26)|((s)<<21)|(131<<1))
#define	WRTMSR_EEI(e)	WORD	$((31<<26)|((e)<<15)|(163<<1))
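/*
 * WRTMSR_EE and WRTMSR_EEI are the 4xx wrtee (extended opcode 131)
 * and wrteei (163) instructions: they write only MSR[EE], from a
 * register or an immediate, letting splhi/spllo below toggle
 * interrupts without a full mtmsr.
 */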

/* on some models mtmsr doesn't synchronise enough (eg, 603e) */
#define	MSRSYNC	SYNC; ISYNC
#define MSYNC	MSRSYNC

/*
 * on the 400 series, the prefetcher madly fetches across RFI, sys call,
 * and others; use BR 0(PC) to stop it.
 */
#define	RFI	WORD	$((19<<26)|(50<<1)); BR 0(PC)
#define	RFCI	WORD	$((19<<26)|(51<<1)); BR 0(PC)

#define MFCCR0(r) WORD $((31<<26) | ((r)<<21) | (0x1d<<11) | (0x13<<16) | (339<<1))
#define MTCCR0(r) WORD $((31<<26) | ((r)<<21) | (0x1d<<11) | (0x13<<16) | (467<<1))
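/*
 * CCR0 (core configuration register 0) is SPR 0x3b3 (947), i.e.
 * (0x1d<<5)|0x13, with the two 5-bit halves encoded swapped as
 * usual for mfspr (339) and mtspr (467).
 */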

/* print progress character.  steps on R7 and R8, needs SB set. */
#define PROG(c)	MOVW $(Uartlite+4), R7; MOVW $(c), R8; MOVW R8, 0(R7); SYNC
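/*
 * Uartlite+4 is the transmit FIFO of the Xilinx UART Lite device
 * (offset 0 is the receive FIFO).  roughly, in C:
 *
 *	*(ulong *)(Uartlite+4) = c;
 *
 * nothing waits for FIFO space; good enough for boot-time progress
 * characters.
 */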

	NOSCHED

TEXT start<>(SB), 1, $-4
	/* virtex4 CR 203746 patch for ppc405 errata cpu_213 */
	MFCCR0(3)
	OR	$0x50000000, R3
	MTCCR0(3)

	XORCC	R0, R0				/* from now on R0 == 0 */
	MOVW	R0, CR

	MOVW	R0, SPR(SPR_ESR)
	/*
	 * setup MSR
	 * turn off interrupts & mmu
	 * use 0x000 as exception prefix
	 * enable machine check
	 */
	MOVW	$(MSR_ME), R1
	ISYNC
	MOVW	R1, MSR
	MSYNC
	ISYNC

	/* setup SB for pre mmu */
	MOVW	$setSB(SB), R2		/* SB until mmu on */

PROG('\r')
PROG('\n')

	/*
	 * Invalidate the caches.
	 */
//	ICCCI(0, 0)
	MOVW	R0, SPR(SPR_ICCR)
	ICCCI(0, 2)  /* errata cpu_121 reveals that EA is used; we'll use SB */
	ISYNC
	DCCCI(0, 0)
	MSYNC

	MOVW	$((DCACHEWAYSIZE/DCACHELINESZ)-1), R3
	MOVW	R3, CTR
	MOVW	R0, R3
dcinv:
	DCCCI(0,3)
	ADD	$32, R3
	BDNZ	dcinv
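
/*
 * on the 405, dccci invalidates a data-cache congruence class at a
 * time, so walking one cache way in line-size steps covers the whole
 * cache.  roughly, in C:
 *
 *	for(a = 0; a < DCACHEWAYSIZE; a += DCACHELINESZ)
 *		dccci(a);
 */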

	/*
	 * cache is write-through; no user-defined 0; big endian throughout.
	 * start with caches off until we have zeroed all of memory once.
	 */
	MOVW	$~0, R3
	MOVW	R3, SPR(SPR_DCWR)	/* write-through everywhere */
	/* starting from the high bit, each bit represents 128MB */
	MOVW	R0, R3			/* region bits */
	MOVW	R3, SPR(SPR_DCCR)
	MOVW	R3, SPR(SPR_ICCR)
	ISYNC
	MOVW	R0, SPR(SPR_SU0R)
	MOVW	R0, SPR(SPR_SLER)
	ISYNC

	NOR	R3, R3		/* no speculative access in uncached mem */
	MOVW	R3, SPR(SPR_SGR)
	ISYNC

	/*
	 * set other system configuration values
	 */
	MOVW	R0, SPR(SPR_PIT)
	MOVW	$~0, R3
	MOVW	R3, SPR(SPR_TSR)


	/* run the boot loader with the mmu off */

	/*
	 * invalidate the caches again to flush any addresses
	 * below KZERO
	 */
	ICCCI(0, 0)
	ISYNC

	/*
	 * Set up SB, vector space (16KiB, 64KiB aligned),
	 * extern registers (m->, up->) and stack.
	 * Mach (and stack) will be cleared along with the
	 * rest of BSS below if this is CPU#0.
	 * Memstart is the first free memory location
	 * after the kernel.
	 */
	MOVW	$setSB(SB), R2			/* (SB) */

PROG('P')
PROG('l')

	MOVW	$PHYSSRAM, R6			/* vectors at bottom of sram */
	MOVW	R6, SPR(SPR_EVPR)

	/* only one cpu, # zero */
	/* sizeof(Mach) is currently 19*4 = 76 bytes */
	MOVW	R6, R(MACH)			/* m-> before 1st vector */

	MOVW	R0, R(USER)			/* up-> */
	MOVW	$0xfffffffc, R1		/* put stack in sram temporarily */

_CPU0:						/* boot processor */
	MOVW	$edata-4(SB), R3
	MOVW	R0, R4
	SUB	$8, R4				/* sram end, below JMP */
_clrbss:					/* clear BSS */
	MOVWU	R0, 4(R3)
	CMP	R3, R4
	BNE	_clrbss

	MOVW	R0, memstart(SB)	/* start of unused memory: dram */
	MOVW	R6, vectorbase(SB) /* 64KiB aligned vector base, for trapinit */

PROG('a')
PROG('n')
PROG(' ')
	BL	main(SB)
	BR	0(PC)			/* main should not return; spin if it does */
	RETURN

TEXT	cacheson(SB), 1, $-4
	/* cache is write-through; no user-defined 0; big endian throughout */
	MOVW	$~0, R3
	MOVW	R3, SPR(SPR_DCWR)	/* write-through everywhere */
	/*
	 * cache bottom 128MB (dram) & top 128MB (sram), but not I/O reg.s.
	 * starting from the high bit, each bit represents another 128MB.
	 */
	MOVW	$(1<<31 | 1<<0), R3
	MOVW	R3, SPR(SPR_DCCR)
	MOVW	R3, SPR(SPR_ICCR)
	ISYNC
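	/*
	 * DCCR/ICCR bits select 128MB regions, high bit first: 1<<31
	 * covers 0x00000000-0x07ffffff (dram), 1<<0 covers
	 * 0xf8000000-0xffffffff (sram).
	 */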

	MOVW	R0, SPR(SPR_SU0R)
	MOVW	R0, SPR(SPR_SLER)
	ISYNC

	MOVW	R3, R4
	NOR	R3, R3		/* no speculative access in uncached mem */
	MOVW	R3, SPR(SPR_SGR)
	ISYNC
	MOVW	R4, R3		/* return value: true iff caches on */
	RETURN

TEXT	splhi(SB), 1, $-4
	MOVW	MSR, R3
	WRTMSR_EEI(0)
//	MOVW	LR, R31
//	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
	RETURN

TEXT	splx(SB), 1, $-4
//	MOVW	LR, R31
//	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
	/* fall through */

TEXT	splxpc(SB), 1, $-4
	WRTMSR_EE(3)
	RETURN

TEXT	spllo(SB), 1, $-4
	MOVW	MSR, R3
	WRTMSR_EEI(1)
	RETURN

TEXT	spldone(SB), 1, $-4
	RETURN

TEXT	islo(SB), 1, $-4
	MOVW	MSR, R3
	RLWNM	$0, R3, $MSR_EE, R3
	RETURN
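
/*
 * the usual Plan 9 spl protocol: splhi/spllo return the old MSR and
 * splx restores only its EE bit (WRTMSR_EE(3) writes MSR[EE] from
 * R3).  typical C usage:
 *
 *	s = splhi();
 *	...critical section...
 *	splx(s);
 */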

TEXT dcbi(SB), 1, $-4				/* dcbi(addr) */
	DCBI	(R3)
	RETURN

TEXT	icflush(SB), 1, $-4	/* icflush(virtaddr, count) */
	MOVW	n+4(FP), R4
	RLWNM	$0, R3, $~(ICACHELINESZ-1), R5
	SUB	R5, R3
	ADD	R3, R4
	ADD	$(ICACHELINESZ-1), R4
	SRAW	$ICACHELINELOG, R4
	MOVW	R4, CTR
icf0:	ICBI	(R5)
	ADD	$ICACHELINESZ, R5
	BDNZ	icf0
	ISYNC
	RETURN
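
/*
 * the arithmetic above rounds the region out to whole cache lines.
 * roughly, in C:
 *
 *	p = addr & ~(ICACHELINESZ-1);
 *	n = (addr - p + count + ICACHELINESZ - 1) >> ICACHELINELOG;
 *	while(n-- > 0){
 *		icbi(p);
 *		p += ICACHELINESZ;
 *	}
 */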

TEXT sync(SB), 1, $-4				/* sync() */
	SYNC
	RETURN

TEXT dcflush(SB), 1, $-4			/* dcflush(virtaddr, count) */
	MOVW	n+4(FP), R4
	RLWNM	$0, R3, $~(DCACHELINESZ-1), R5
	CMP	R4, $0
	BLE	dcf1
	SUB	R5, R3
	ADD	R3, R4
	ADD	$(DCACHELINESZ-1), R4
	SRAW	$DCACHELINELOG, R4
	MOVW	R4, CTR
dcf0:
	DCBF	(R5)
	ADD	$DCACHELINESZ, R5
	BDNZ	dcf0
dcf1:
	SYNC
	RETURN
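
/*
 * same line arithmetic as icflush above, but using dcbf (flush, then
 * invalidate, data lines) and skipping the loop when the count is
 * not positive.
 */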

/* copied from ../vt5/l.s; hope it's right */
TEXT	cachesinvalidate(SB), 1, $-4
	ICCCI(0, 2) /* errata cpu_121 reveals that EA is used; we'll use SB */
	DCCCI(0, 2) /* dcache must not be in use (or just needs to be clean?) */
	MSYNC
	RETURN

TEXT	getpit(SB), 1, $0
	MOVW	SPR(SPR_PIT), R3
	RETURN

TEXT	putpit(SB), 1, $0
	MOVW	R3, SPR(SPR_PIT)
	RETURN

TEXT	putpid(SB), 1, $0
	MOVW	R3, SPR(SPR_PID)
	RETURN

TEXT	getpid(SB), 1, $0
	MOVW	SPR(SPR_PID), R3
	RETURN

/* 405s have no PIR, so use low bits of PVR, which rae can set. */
TEXT	getpir(SB), 1, $-4
	MOVW	SPR(SPR_PVR), R3
	ANDCC	$017, R3
	RETURN

TEXT	gettbl(SB), 1, $0
	MFTB(TBRL, 3)
	RETURN

TEXT	gettbu(SB), 1, $0
	MFTB(TBRU, 3)
	RETURN

TEXT	gettsr(SB), 1, $0
	MOVW	SPR(SPR_TSR), R3
	RETURN

TEXT	puttsr(SB), 1, $0
	MOVW	R3, SPR(SPR_TSR)
	RETURN

TEXT	gettcr(SB), 1, $0
	MOVW	SPR(SPR_TCR), R3
	RETURN

TEXT	puttcr(SB), 1, $0
	MOVW	R3, SPR(SPR_TCR)
	RETURN

TEXT	getpvr(SB), 1, $0
	MOVW	SPR(SPR_PVR), R3
	RETURN

TEXT	getmsr(SB), 1, $0
	MOVW	MSR, R3
	RETURN

TEXT	putmsr(SB), 1, $0
	SYNC
	MOVW	R3, MSR
	MSRSYNC
	RETURN

TEXT	getesr(SB), 1, $0
	MOVW	SPR(SPR_ESR), R3
	RETURN

TEXT	putesr(SB), 1, $0
	MOVW	R3, SPR(SPR_ESR)
	RETURN

TEXT	putevpr(SB), 1, $0
	MOVW	R3, SPR(SPR_EVPR)
	RETURN

TEXT	setsp(SB), 1, $0
	MOVW	R3, R1
	RETURN

TEXT	getdear(SB), 1, $0
	MOVW	SPR(SPR_DEAR), R3
	RETURN

TEXT	tas32(SB), 1, $0
	SYNC
	MOVW	R3, R4
	MOVW	$0xdead, R5
tas1:
	MSYNC
	LWAR	(R4), R3
	CMP	R3, $0
	BNE	tas0
	DCBT	(R4)				/* fix 405 errata cpu_210 */
	STWCCC	R5, (R4)
	BNE	tas1
tas0:
	SYNC
	ISYNC
	RETURN
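
/*
 * test-and-set via lwarx/stwcx.: returns 0 if the word was 0 and
 * 0xdead was stored into it, else the nonzero value already there.
 * a spin lock built on it looks roughly like this (assuming the
 * usual Lock with an ulong key):
 *
 *	while(tas32(&l->key) != 0)
 *		;
 */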

TEXT	eieio(SB), 1, $0
	EIEIO
	RETURN

TEXT	syncall(SB), 1, $0
	SYNC
	ISYNC
	RETURN

TEXT _xinc(SB), 1, $0			/* void _xinc(long *); */
	MOVW	R3, R4
xincloop:
	LWAR	(R4), R3
	ADD	$1, R3
	DCBT	(R4)				/* fix 405 errata cpu_210 */
	STWCCC	R3, (R4)
	BNE	xincloop
	RETURN

TEXT _xdec(SB), 1, $0			/* long _xdec(long *); */
	MOVW	R3, R4
xdecloop:
	LWAR	(R4), R3
	ADD	$-1, R3
	DCBT	(R4)				/* fix 405 errata cpu_210 */
	STWCCC	R3, (R4)
	BNE	xdecloop
	RETURN
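
/*
 * atomic add via load-reserve/store-conditional: stwcx. fails, and
 * clears CR0[EQ], if anything else touched the word since the lwarx,
 * so the loop retries until the update was atomic.  rough C
 * equivalent, with cas() a hypothetical compare-and-swap:
 *
 *	do
 *		old = *p;
 *	while(!cas(p, old, old+1));
 *
 * _xdec likewise with old-1, returning the new value.
 */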


#define SPR_CSRR0	0x03a		/* Critical Save/Restore Register 0 */
#define SPR_CSRR1	0x03b		/* Critical Save/Restore Register 1 */
//#define SPR_DEAR	0x03d		/* Data Error Address Register */

#define SPR_SPRG4R	0x104		/* SPR general 4; user/supervisor R */
#define SPR_SPRG5R	0x105		/* SPR general 5; user/supervisor R */
#define SPR_SPRG6R	0x106		/* SPR general 6; user/supervisor R */
#define SPR_SPRG7R	0x107		/* SPR general 7; user/supervisor R */
#define SPR_SPRG4W	0x114		/* SPR General 4; supervisor W */
#define SPR_SPRG5W	0x115		/* SPR General 5; supervisor W */
#define SPR_SPRG6W	0x116		/* SPR General 6; supervisor W */
#define SPR_SPRG7W	0x117		/* SPR General 7; supervisor W */

#define SPR_MCSRR0	0x23a
#define SPR_MCSRR1	0x23b

#define	SAVER0		SPR_SPRG0	/* shorthand use in save/restore */
#define	SAVER1		SPR_SPRG1
#define	SAVELR		SPR_SPRG2
#define	SAVEXX		SPR_SPRG3

#define	UREGSPACE	(UREGSIZE+8)

#define RTBL		28		/* time stamp tracing */

/*
 * the 405 does not follow Book E: traps turn the mmu off.
 * the following code has been executed at the exception
 * vector location already:
 *	MOVW	R0, SPR(SAVER0)
 *	(critical interrupts disabled in MSR, using R0)
 *	MOVW	LR, R0
 *	MOVW	R0, SPR(SAVELR)
 *	BL	trapvec(SB)
 */
TEXT	trapvec(SB), 1, $-4
	MOVW	LR, R0
	MOVW	R0, SPR(SAVEXX)			/* save interrupt vector offset */
trapcommon:					/* entry point for machine checks */
	MOVW	R1, SPR(SAVER1)			/* save stack pointer */

	/* did we come from user space? */
	MOVW	SPR(SPR_SRR1), R0
	MOVW	CR, R1
	MOVW	R0, CR
	BC	4,17,ktrap			/* if MSR[PR]=0, we are in kernel space */

	/* was user mode, switch to kernel stack and context */
	MOVW	R1, CR
	MOVW	SPR(SPR_SPRG7R), R1		/* up->kstack+KSTACK-UREGSPACE, set in touser and sysrforkret */
	MFTB(TBRL, RTBL)
	BL	saveureg(SB)

//	MOVW	$mach0(SB), R(MACH)		/* FIX FIX FIX */
//	MOVW	8(R(MACH)), R(USER)		/* FIX FIX FIX */
//try this:
/* 405s have no PIR; could use PVR */
//	MOVW	SPR(SPR_PIR), R4		/* PIN */
//	SLW	$2, R4				/* offset into pointer array */
	MOVW	$0, R4				/* assume cpu 0 */
	MOVW	$machptr(SB), R(MACH)		/* pointer array */
	ADD	R4, R(MACH)			/* pointer to array element */
	MOVW	(R(MACH)), R(MACH)		/* m-> */
	MOVW	8(R(MACH)), R(USER)		/* up-> */

	BL	trap(SB)
	BR	restoreureg

ktrap:
	/* was kernel mode, R(MACH) and R(USER) already set */
	MOVW	R1, CR
	MOVW	SPR(SAVER1), R1
	SUB	$UREGSPACE, R1		/* push onto current kernel stack */
	BL	saveureg(SB)
	BL	trap(SB)

restoreureg:
	MOVMW	48(R1), R2		/* r2:r31 */
	/* defer R1, R0 */
	MOVW	36(R1), R0
	MOVW	R0, CTR
	MOVW	32(R1), R0
	MOVW	R0, XER
	MOVW	28(R1), R0
	MOVW	R0, CR	/* CR */
	MOVW	24(R1), R0
	MOVW	R0, LR
	MOVW	20(R1), R0
	MOVW	R0, SPR(SPR_SPRG7W)	/* kstack for traps from user space */
	MOVW	16(R1), R0
	MOVW	R0, SPR(SPR_SRR0)	/* old PC */
	MOVW	12(R1), R0
	RLWNM	$0, R0, $~MSR_WE, R0	/* remove wait state */
	MOVW	R0, SPR(SPR_SRR1)	/* old MSR */
	/* cause, skip */
	MOVW	40(R1), R0
	MOVW	44(R1), R1		/* old SP */
	SYNC				/* fix 405 errata cpu_210 */
	RFI

/*
 * machine check.
 * make it look like the others.
 * it's safe to destroy SPR_SRR0/1 because they can only be in
 * use if a critical interrupt has interrupted a non-critical interrupt
 * before it has had a chance to block critical interrupts,
 * but no recoverable machine checks can occur during a critical interrupt,
 * so the lost state doesn't matter.
 */
TEXT	trapmvec(SB), 1, $-4
	MOVW	LR, R0
	MOVW	R0, SPR(SAVEXX)
	MOVW	SPR(SPR_MCSRR0), R0		/* PC of excepting insn */
	MOVW	R0, SPR(SPR_SRR0)
	MOVW	SPR(SPR_MCSRR1), R0		/* old MSR */
	MOVW	R0, SPR(SPR_SRR1)
	BR	trapcommon

/*
 * external interrupts (non-critical)
 */
TEXT	intrvec(SB), 1, $-4
	MOVW	LR, R0
	MOVW	R0, SPR(SAVEXX)			/* save interrupt vector offset */
	MOVW	R1, SPR(SAVER1)			/* save stack pointer */

	/* did we come from user space? */
	MOVW	SPR(SPR_SRR1), R0
	MOVW	CR, R1
	MOVW	R0, CR
	BC	4,17,intr1			/* if MSR[PR]=0, we are in kernel space */

	/* was user mode, switch to kernel stack and context */
	MOVW	R1, CR
	MOVW	SPR(SPR_SPRG7R), R1		/* up->kstack+KSTACK-UREGSPACE, set in touser and sysrforkret */
	BL	saveureg(SB)

//	MOVW	$mach0(SB), R(MACH)		/* FIX FIX FIX */
//	MOVW	8(R(MACH)), R(USER)
//try this:
/* 405s have no PIR */
//	MOVW	SPR(SPR_PIR), R4		/* PIN */
//	SLW	$2, R4				/* offset into pointer array */
	MOVW	$0, R4				/* assume cpu 0 */
	MOVW	$machptr(SB), R(MACH)		/* pointer array */
	ADD	R4, R(MACH)			/* pointer to array element */
	MOVW	(R(MACH)), R(MACH)		/* m-> */
	MOVW	8(R(MACH)), R(USER)		/* up-> */

	BL	intr(SB)
	BR	restoreureg

intr1:
	/* was kernel mode, R(MACH) and R(USER) already set */
	MOVW	R1, CR
	MOVW	SPR(SAVER1), R1
	SUB	$UREGSPACE, R1		/* push onto current kernel stack */
	BL	saveureg(SB)
	BL	intr(SB)
	BR	restoreureg

/*
 * critical interrupt
 */
TEXT	critintrvec(SB), 1, $-4
	MOVW	LR, R0
	MOVW	R0, SPR(SAVEXX)
	MOVW	R1, SPR(SAVER1)		/* save stack pointer */

	/* did we come from user space? */
	MOVW	SPR(SPR_CSRR1), R0
	MOVW	CR, R1
	MOVW	R0, CR
	BC	4,16,kintrintr		/* if MSR[EE]=0, kernel was interrupted at start of intrvec */
	BC	4,17,kcintr1		/* if MSR[PR]=0, we are in kernel space */

ucintr:
	/* was user mode or intrvec interrupted: switch to kernel stack and context */
	MOVW	R1, CR
	MOVW	SPR(SPR_SPRG7R), R1		/* up->kstack+KSTACK-UREGSPACE, set in touser and sysrforkret */
	BL	saveureg(SB)

//	MOVW	$mach0(SB), R(MACH)		/* FIX FIX FIX */
//	MOVW	8(R(MACH)), R(USER)
//try this:
/* 405s have no PIR */
//	MOVW	SPR(SPR_PIR), R4		/* PIN */
//	SLW	$2, R4				/* offset into pointer array */
	MOVW	$0, R4				/* assume cpu 0 */
	MOVW	$machptr(SB), R(MACH)		/* pointer array */
	ADD	R4, R(MACH)			/* pointer to array element */
	MOVW	(R(MACH)), R(MACH)		/* m-> */
	MOVW	8(R(MACH)), R(USER)		/* up-> */

	BR	cintrcomm

kintrintr:
	/* kernel mode, and EE off, so kernel intrvec interrupted, but was previous mode kernel or user? */
	MOVW	SPR(SPR_SRR1), R0
	MOVW	R0, CR
	BC	(4+8),17,ucintr	/* if MSR[PR]=1, we were in user space; need to set up */

kcintr1:
	/* was kernel mode and external interrupts enabled, R(MACH) and R(USER) already set */
	MOVW	R1, CR
	MOVW	SPR(SAVER1), R1
	SUB	$UREGSPACE, R1	/* push onto current kernel stack */
	BL	saveureg(SB)

cintrcomm:
	/* special part of Ureg for critical interrupts only (using Ureg.dcmp, Ureg.icmp, Ureg.dmiss) */
	MOVW	SPR(SPR_SPRG6R), R4	/* critical interrupt saves volatile R0 in SPRG6 */
	MOVW	R4, (160+8)(R1)
	MOVW	SPR(SPR_CSRR0), R4	/* store critical interrupt pc */
	MOVW	R4, (164+8)(R1)
	MOVW	SPR(SPR_CSRR1), R4	/* critical interrupt msr */
	MOVW	R4, (168+8)(R1)

	BL	intr(SB)

	/* first restore usual part of Ureg */
	MOVMW	48(R1), R2	/* r2:r31 */
	/* defer R1, R0 */
	MOVW	40(R1), R0
	MOVW	R0, SPR(SAVER0)		/* restore normal r0 save */
	MOVW	36(R1), R0
	MOVW	R0, CTR
	MOVW	32(R1), R0
	MOVW	R0, XER
	MOVW	28(R1), R0
	MOVW	R0, CR	/* CR */
	MOVW	24(R1), R0
	MOVW	R0, LR
	MOVW	20(R1), R0
	MOVW	R0, SPR(SPR_SPRG7W)	/* kstack for traps from user space */
	MOVW	16(R1), R0
	MOVW	R0, SPR(SPR_SRR0)	/* saved normal PC */
	MOVW	12(R1), R0
	MOVW	R0, SPR(SPR_SRR1)	/* saved normal MSR */

	/* restore special bits for critical interrupts */
	MOVW	(164+8)(R1), R0		/* critical interrupt's saved pc */
	MOVW	R0, SPR(SPR_CSRR0)
	MOVW	(168+8)(R1), R0
	RLWNM	$0, R0, $~MSR_WE, R0	/* remove wait state */
	MOVW	R0, SPR(SPR_CSRR1)

	/* cause, skip */
	MOVW	(160+8)(R1), R0		/* critical interrupt's saved R0 */
	MOVW	44(R1), R1		/* old SP */
	RFCI

/*
 * enter with stack set and mapped.
 * on return, SB (R2) has been set, and R3 has the Ureg*.
 * Stack (R1), R(MACH) and R(USER) are set by caller, if required.
 */
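/*
 * stack frame laid out by saveureg (Ureg* = R1+8, so Ureg offset =
 * stack offset - 8):
 *	8(R1)	cause/vector	24(R1)	LR	40(R1)	R0
 *	12(R1)	old MSR		28(R1)	CR	44(R1)	old SP (R1)
 *	16(R1)	old PC		32(R1)	XER	48(R1)	R2-R31
 *	20(R1)	SPRG7 (kstack)	36(R1)	CTR
 */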
TEXT	saveureg(SB), 1, $-4
	MOVMW	R2, 48(R1)			/* save gprs r2 to r31 */
	MOVW	$setSB(SB), R2
	MOVW	SPR(SAVER1), R4
	MOVW	R4, 44(R1)
	MOVW	SPR(SAVER0), R5
	MOVW	R5, 40(R1)
	MOVW	CTR, R6
	MOVW	R6, 36(R1)
	MOVW	XER, R4
	MOVW	R4, 32(R1)
	MOVW	CR, R5
	MOVW	R5, 28(R1)
	MOVW	SPR(SAVELR), R6			/* LR */
	MOVW	R6, 24(R1)
	MOVW	SPR(SPR_SPRG7R), R6		/* up->kstack+KSTACK-UREGSPACE */
	MOVW	R6, 20(R1)
	MOVW	SPR(SPR_SRR0), R0
	MOVW	R0, 16(R1)			/* PC of excepting insn (or next insn) */
	MOVW	SPR(SPR_SRR1), R0
	MOVW	R0, 12(R1)			/* old MSR */
	MOVW	SPR(SAVEXX), R0
	MOVW	R0, 8(R1)			/* cause/vector */
	ADD	$8, R1, R3			/* Ureg* */
	DCBT	(R1)				/* fix 405 errata cpu_210 */
	STWCCC	R3, (R1)			/* break any pending reservations */
	MOVW	$0, R0				/* compiler/linker expect R0 to be zero */
	RETURN
685