xref: /plan9-contrib/sys/src/9/vt4/l.s (revision cce710d3fba6018ef0c8c1b503e500038744d49c)
1/* virtex4 ppc405 machine assist */
2#include	"mem.h"
3
4/*
5 * 405 Special Purpose Registers of interest here
6 */
7#define SPR_CCR0	947		/* Core Configuration Register 0 */
8#define SPR_DAC1	1014		/* Data Address Compare 1 */
9#define SPR_DAC2	1015		/* Data Address Compare 2 */
10#define SPR_DBCR0	1010		/* Debug Control Register 0 */
11#define SPR_DBCR1	957		/* Debug Control Register 1 */
12#define SPR_DBSR	1008		/* Debug Status Register */
13#define SPR_DCCR	1018		/* Data Cache Cachability Register */
14#define SPR_DCWR	954		/* Data Cache Write-through Register */
15#define SPR_DVC1	950		/* Data Value Compare 1 */
16#define SPR_DVC2	951		/* Data Value Compare 2 */
17#define SPR_DEAR	981		/* Data Error Address Register */
18#define SPR_ESR		980		/* Exception Syndrome Register */
19#define SPR_EVPR	982		/* Exception Vector Prefix Register */
20#define SPR_IAC1	1012		/* Instruction Address Compare 1 */
21#define SPR_IAC2	1013		/* Instruction Address Compare 2 */
22#define SPR_IAC3	948		/* Instruction Address Compare 3 */
23#define SPR_IAC4	949		/* Instruction Address Compare 4 */
24#define SPR_ICCR	1019		/* Instruction Cache Cachability Register */
25#define SPR_ICDBDR	979		/* Instruction Cache Debug Data Register */
26#define SPR_PID		945		/* Process ID */
27#define SPR_PIT		987		/* Programmable Interval Timer */
28#define SPR_PVR		287		/* Processor Version Register */
29#define SPR_SGR		953		/* Store Guarded Register */
30#define SPR_SLER	955		/* Storage Little Endian Register */
31#define SPR_SPRG0	272		/* SPR General 0 */
32#define SPR_SPRG1	273		/* SPR General 1 */
33#define SPR_SPRG2	274		/* SPR General 2 */
34#define SPR_SPRG3	275		/* SPR General 3 */
35
36#define SPR_USPRG0	256		/* user SPR G0 */
37
38/* beware that these registers differ in R/W ability on 440 compared to 405 */
/* on the 405 the R aliases map to the same SPR numbers as the W names */
39#define SPR_SPRG4R		SPR_SPRG4W	/* SPR general 4 supervisor R*/
40#define SPR_SPRG5R		SPR_SPRG5W	/* SPR general 5; supervisor R */
41#define SPR_SPRG6R		SPR_SPRG6W	/* SPR general 6; supervisor R */
42#define SPR_SPRG7R		SPR_SPRG7W	/* SPR general 7; supervisor R */
43#define SPR_SPRG4W	0x114		/* SPR General 4; supervisor R/W */
44#define SPR_SPRG5W	0x115		/* SPR General 5; supervisor R/W  */
45#define SPR_SPRG6W	0x116		/* SPR General 6; supervisor R/W  */
46#define SPR_SPRG7W	0x117		/* SPR General 7; supervisor R/W */
47
48#define SPR_SRR0	26		/* Save/Restore Register 0 */
49#define SPR_SRR1	27		/* Save/Restore Register 1 */
50#define SPR_SRR2	990		/* Save/Restore Register 2 */
51#define SPR_SRR3	991		/* Save/Restore Register 3 */
52#define SPR_SU0R	956		/* Storage User-defined 0 Register */
53#define SPR_TBL		284		/* Time Base Lower */
54#define SPR_TBU		85		/* Time Base Upper */
55#define SPR_TCR		986		/* Time Control Register */
56#define SPR_TSR		984		/* Time Status Register */
57#define SPR_ZPR		944		/* Zone Protection Register */
58
59/* use of SPRG registers in save/restore */
60#define	SAVER0	SPR_SPRG0
61#define	SAVER1	SPR_SPRG1
62#define	SAVELR	SPR_SPRG2
63#define	SAVEXX	SPR_SPRG3
64
65/* special instruction definitions */
/* BC BO,BI,target: BDNZ (BO=16) decrements CTR and branches while CTR != 0;
   BDNE (BO=0, BI=2) decrements CTR and branches while CTR != 0 and CR bit 2 is clear */
66#define	BDNZ	BC	16,0,
67#define	BDNE	BC	0,2,
68
69#define	TBRL	268	/* read time base lower in MFTB */
70#define	TBRU	269	/* read time base upper in MFTB */
71#define	MFTB(tbr,d)	WORD	$((31<<26)|((d)<<21)|((tbr&0x1f)<<16)|(((tbr>>5)&0x1f)<<11)|(371<<1))
72
73#define	TLBIA		WORD	$((31<<26)|(370<<1))
74#define	TLBSYNC		WORD	$((31<<26)|(566<<1))
75
76/* 400 models; perhaps others */
77#define	ICCCI(a,b)	WORD	$((31<<26)|((a)<<16)|((b)<<11)|(966<<1))
78#define	DCCCI(a,b)	WORD	$((31<<26)|((a)<<16)|((b)<<11)|(454<<1))
79/* these follow the source -> dest ordering */
80#define	DCREAD(s,t)	WORD	$((31<<26)|((t)<<21)|((s)<<11)|(486<<1))
81/* #define	MSYNC		WORD	$((31<<26)|(598<<1))   /* in the 405? */
82#define	TLBRELO(a,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|(1<<11)|(946<<1))
83#define	TLBREHI(a,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|(0<<11)|(946<<1))
84#define	TLBWELO(s,a)	WORD	$((31<<26)|((s)<<21)|((a)<<16)|(1<<11)|(978<<1))
85#define	TLBWEHI(s,a)	WORD	$((31<<26)|((s)<<21)|((a)<<16)|(0<<11)|(978<<1))
86#define	TLBSXF(a,b,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|((b)<<11)|(914<<1))
87#define	TLBSXCC(a,b,t)	WORD	$((31<<26)|((t)<<21)|((a)<<16)|((b)<<11)|(914<<1)|1)
88#define	WRTMSR_EE(s)	WORD	$((31<<26)|((s)<<21)|(131<<1)); MSRSYNC
89#define	WRTMSR_EEI(e)	WORD	$((31<<26)|((e)<<15)|(163<<1)); MSRSYNC
90
91/*
92 * there are three flavours of barrier: MBAR, MSYNC and ISYNC.
93 * ISYNC is a context sync, a strong instruction barrier.
94 * MSYNC is an execution sync (weak instruction barrier) + data storage barrier.
95 * MBAR is a memory (data storage) barrier.
96 */
97#define MBAR	EIEIO
98/* on some models mtmsr doesn't synchronise enough (eg, 603e) */
99#define	MSRSYNC	SYNC; ISYNC
100
101/*
102 * on the 400 series, the prefetcher madly fetches across RFI, sys call,
103 * and others; use BR 0(PC) to stop it.
104 */
105#define	RFI	WORD $((19<<26)|(50<<1)); BR 0(PC)
106#define	RFCI	WORD $((19<<26)|(51<<1)); BR 0(PC)
107
108#define ORI(imm, reg)	WORD $((24<<26) | (reg)<<21 | (reg)<<16 | (imm))
109#define ORIS(imm, reg)	WORD $((25<<26) | (reg)<<21 | (reg)<<16 | (imm))
110
111/* print progress character.  steps on R7 and R8, needs SB set. */
112#define PROG(c)	MOVW $(Uartlite+4), R7; MOVW $(c), R8; MOVW R8, 0(R7)
113
/* stack space reserved for a trap frame: the Ureg plus 8 bytes below it (see saveureg) */
114#define	UREGSPACE	(UREGSIZE+8)
115
116	NOSCHED
117
/*
 * start: kernel entry point.
 * Runs with the MMU possibly on (from the bootstrap): turns off interrupts,
 * FPU and MMU, resets the caches, verifies the data segment, sets up
 * SB, Mach and the stack, clears bss, then calls main().  Never returns.
 */
118	TEXT start(SB), 1, $-4

119
120	/*
121	 * our bootstrap may have already turned on the MMU.
122	 * setup MSR
123	 * turn off interrupts, FPU & MMU
124	 * use 0x000 as exception prefix
125	 * don't enable machine check until the vector is set up
126	 */
127	MOVW	MSR, R3
128	RLWNM	$0, R3, $~MSR_EE, R3
129	RLWNM	$0, R3, $~MSR_FP, R3
130	RLWNM	$0, R3, $~(MSR_IR|MSR_DR), R3
131	RLWNM	$0, R3, $~MSR_ME, R3
132	ISYNC
133	MOVW	R3, MSR
134	MSRSYNC
135
136	/* except during trap handling, R0 is zero from now on */
137	MOVW	$0, R0
138	MOVW	R0, CR
139
140	/* setup SB for pre mmu */
141	MOVW	$setSB-KZERO(SB), R2	/* SB until mmu on */
142
143PROG('\r')
144PROG('\n')
145PROG('P')
146
	/* NOTE(review): recognizable sentinel values in callee-saved registers, presumably a debugging aid -- confirm */
147	MOVW	$18, R18
148	MOVW	$19, R19
149	MOVW	$20, R20
150	MOVW	$21, R21
151	MOVW	$22, R22
152	MOVW	$23, R23
153	MOVW	$24, R24
154
155	/*
156	 * reset the caches and disable them until mmu on
157	 */
158	MOVW	R0, SPR(SPR_ICCR)
159	ICCCI(0, 2)  /* errata cpu_121 reveals that EA is used; we'll use SB */
160	ISYNC
161	DCCCI(0, 0)
162	MSRSYNC
163
	/* invalidate the whole data cache, one line per loop iteration (count in CTR) */
164	MOVW	$((DCACHEWAYSIZE/DCACHELINESZ)-1), R3
165	MOVW	R3, CTR
166	MOVW	R0, R3
167dcinv:
168	DCCCI(0,3)
169	ADD	$32, R3
170	BDNZ	dcinv
171
172	/*
173	 * cache is write-back; no user-defined 0; big endian throughout.
174	 * write-through cache would require putting ~0 into R3.
175	 */
176	MOVW	R0, SPR(SPR_DCWR)	/* write-through nowhere: write-back */
177
178	/* starting from the high bit, each bit represents 128MB */
179	MOVW	R0, R3			/* region bits */
180	MOVW	R3, SPR(SPR_DCCR)	/* caches off briefly */
181	MOVW	R3, SPR(SPR_ICCR)
182	ISYNC
183	MOVW	R0, SPR(SPR_SU0R)
184	MOVW	R0, SPR(SPR_SLER)
185	ISYNC
186
187	/*
188	 * CCR0:
189	 *	1<<25 LWL load word as line
190	 *	1<<11 PFC prefetching for cacheable regions
191	 */
192	MOVW	SPR(SPR_CCR0), R4
193	OR	$((1<<25)|(1<<11)), R4
194	MOVW	R4, SPR(SPR_CCR0)
195
196	/* R3 still has region bits */
197	NOR	R3, R3		/* no speculative access in uncached mem */
198	MOVW	R3, SPR(SPR_SGR)
199	ISYNC
200
201	/*
202	 * set other system configuration values
203	 */
204	MOVW	R0, SPR(SPR_PIT)
205	MOVW	$~0, R3
206	MOVW	R3, SPR(SPR_TSR)
207
208PROG('l')
209	BL	kernelmmu(SB)
210	/* now running with MMU on */
211
212	/* set R2 to correct value */
213	MOVW	$setSB(SB), R2
214
215	/*
216	 * now running with MMU in kernel address space
217	 * invalidate the caches again to flush any addresses
218	 * below KZERO
219	 */
220	ICCCI(0, 2)  /* errata cpu_121 reveals that EA is used; we'll use SB */
221	ISYNC
222
223PROG('a')
224
225	/*
226	 * config caches for kernel in real mode; data is write-through.
227	 * cache bottom 128MB (dram) & top 128MB (sram), but not I/O reg.s.
228	 */
229	MOVW	$((1<<31) | (1<<0)), R3
230	MOVW	R3, SPR(SPR_DCCR)
231	MOVW	R3, SPR(SPR_ICCR)
232	ISYNC
233	/* R3 still has region bits */
234	NOR	R3, R3		/* no speculative access in uncached mem */
235	MOVW	R3, SPR(SPR_SGR)
236	ISYNC
237
238	/* no kfpinit on 4xx */
239
	/* verify the data segment loaded correctly; dverify presumably holds 0x01020304 -- see data */
240	MOVW	dverify(SB), R3
241	MOVW	$0x01020304, R4
242	CMP	R3, R4
243	BEQ	dataok
244
245PROG('?')
246	/* seriously bad news, punt to vector 0x1500 (unused) */
247	MOVW	$(PHYSSRAM + 0x1500), R3
248	BL	0(R3)
249
250dataok:
251	/* set up Mach */
252	MOVW	$mach0(SB), R(MACH)
253	ADD	$(MACHSIZE-8), R(MACH), R1	/* set stack */
254/*
255 * mach0 is in bss, so this loop is redundant
256	SUB	$4, R(MACH), R3
257	ADD	$4, R1, R4
258clrmach:
259	MOVWU	R0, 4(R3)
260	CMP	R3, R4
261	BNE	clrmach
262 */
263
264PROG('n')
	/* zero bss: words from edata up to end */
265	MOVW	$edata-4(SB), R3
266	MOVW	$end(SB), R4
267clrbss:
268	MOVWU	R0, 4(R3)
269	CMP	R3, R4
270	BNE	clrbss
271
272	MOVW	R0, R(USER)
273	MOVW	R0, 0(R(MACH))
274
275PROG(' ')
276PROG('9')
277PROG('\r')
278PROG('\n')
279	BL	main(SB)
280	BR	0(PC)   /* paranoia -- not reached */
281
282GLOBL	mach0(SB), $(MAXMACH*BY2PG)
283
/*
 * kernelmmu: wipe the TLB, then load fixed kernel TLB entries from the
 * tlbtab..tlbtabe table (pairs of TLBHI/TLBLO words), placing them at the
 * top TLB indices (from 63 downward), and finally enable translation by
 * returning through RFI with MSR[IR|DR] set.  Called with MMU off.
 */
284TEXT	kernelmmu(SB), 1, $-4
285	TLBIA
286	ISYNC
287	SYNC
288
289	/* make following TLB entries shared, TID=PID=0 */
290	MOVW	R0, SPR(SPR_PID)
291	ISYNC
292
293	/* zone 0 is supervisor-only; zone 1 is user and supervisor; all access controlled by TLB */
294	MOVW	$((0<<30)|(1<<28)), R5
295	MOVW	R5, SPR(SPR_ZPR)
296
297	/* map various things 1:1 */
298	MOVW	$tlbtab-KZERO(SB), R4
299	MOVW	$tlbtabe-KZERO(SB), R5
300	SUB	R4, R5
301	/* size in bytes is now in R5 */
302	MOVW	$(2*4), R6
303	DIVW	R6, R5
304	/* Number of TLBs is now in R5 */
305	SUB	$4, R4
306	MOVW	R5, CTR
307	/* at start of this loop, # TLBs in CTR, R3 is tlb index 0, R4 is pointing at
308	  * tlbstart[-1] (for pre-increment below)
309	  */
310	/* last thing to do: use 63 as index as we put kernel TLBs at top */
311	MOVW	$63, R3
312ltlb:
	/* BDNZ counts iterations in CTR while R3 walks TLB indices downward */
313	MOVWU	4(R4), R5	/* TLBHI */
314	TLBWEHI(5,3)
315	MOVWU	4(R4), R5	/* TLBLO */
316	TLBWELO(5,3)
317	SUB	$1, R3
318	BDNZ	ltlb
319
320	/* enable MMU */
321	MOVW	LR, R3
322	OR	$KZERO, R3
323	MOVW	R3, SPR(SPR_SRR0)
324	MOVW	MSR, R4
325	OR	$(MSR_IR|MSR_DR), R4
326	MOVW	R4, SPR(SPR_SRR1)
327/*	ISYNC	/* no ISYNC here as we have no TLB entry for the PC (without KZERO) */
328	SYNC		/* fix 405 errata cpu_210 */
329	RFI		/* resume in kernel mode in caller */
330
/* splhi: disable interrupts; returns previous MSR in R3 and records caller's PC in m->splpc */
331TEXT	splhi(SB), 1, $-4
332	MOVW	MSR, R3
333	WRTMSR_EEI(0)
334	MOVW	LR, R31
335	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
336	RETURN
337
338/*
339 * everything from here up to, but excluding, spldone
340 * will be billed by devkprof to the pc saved when we went splhi.
341 */
/* spllo: enable interrupts; returns previous MSR in R3 */
342TEXT	spllo(SB), 1, $-4
343	MOVW	MSR, R3
344	WRTMSR_EEI(1)
345	RETURN
346
/* splx(s): restore interrupt state from a previous spl; records caller's PC then falls into splxpc */
347TEXT	splx(SB), 1, $-4
348	MOVW	LR, R31
349	MOVW	R31, 4(R(MACH))	/* save PC in m->splpc */
350	/* fall though */
351
/* splxpc(s): restore MSR[EE] from bit in R3 without touching m->splpc */
352TEXT	splxpc(SB), 1, $-4
353	WRTMSR_EE(3)
354	RETURN
355
356/***/
/* spldone: marker for devkprof billing range; does nothing */
357TEXT	spldone(SB), 1, $-4
358	RETURN
359
/* islo: return non-zero (MSR_EE bit) if interrupts are enabled */
360TEXT	islo(SB), 1, $-4
361	MOVW	MSR, R3
362	RLWNM	$0, R3, $MSR_EE, R3
363	RETURN
364
/* setlabel(l): save SP and caller's PC into the Label at R3; returns 0 */
365TEXT	setlabel(SB), 1, $-4
366	MOVW	LR, R31
367	MOVW	R1, 0(R3)
368	MOVW	R31, 4(R3)
369	MOVW	$0, R3
370	RETURN
371
/* gotolabel(l): restore SP and PC from the Label at R3; resumes there returning 1 */
372TEXT	gotolabel(SB), 1, $-4
373	MOVW	4(R3), R31
374	MOVW	R31, LR
375	MOVW	0(R3), R1
376	MOVW	$1, R3
377	RETURN
378
/*
 * touser(sp): first entry to user mode.
 * Stashes PADDR(up->kstack+KSTACK-UREGSPACE) in SPRG7 for trap entry,
 * sets user stack from R3, and RFIs to UTZERO+32 with user-mode MSR.
 */
379TEXT	touser(SB), 1, $-4
380	WRTMSR_EEI(0)
381	MOVW	R(USER), R4			/* up */
382	MOVW	8(R4), R4			/* up->kstack */
383	RLWNM	$0, R4, $~KZERO, R4		/* PADDR(up->kstack) */
384	ADD	$(KSTACK-UREGSPACE), R4
385	MOVW	R4, SPR(SPR_SPRG7W)	/* save for use in traps/interrupts */
386	MOVW	$(UTZERO+32), R5	/* header appears in text */
387	MOVW	$UMSR, R4
388	MOVW	R4, SPR(SPR_SRR1)
389	MOVW	R3, R1
390	MOVW	R5, SPR(SPR_SRR0)
391	ISYNC
392	SYNC				/* fix 405 errata cpu_210 */
393	RFI
394
/* icflush(va, n): invalidate (ICBI) the instruction cache lines covering [va, va+n) */
395TEXT	icflush(SB), 1, $-4	/* icflush(virtaddr, count) */
396	MOVW	n+4(FP), R4
397	RLWNM	$0, R3, $~(ICACHELINESZ-1), R5	/* R5 = va rounded down to line */
398	SUB	R5, R3
399	ADD	R3, R4
400	ADD	$(ICACHELINESZ-1), R4
401	SRAW	$ICACHELINELOG, R4	/* R4 = number of lines (rounded up) */
402	MOVW	R4, CTR
403icf0:	ICBI	(R5)
404	ADD	$ICACHELINESZ, R5
405	BDNZ	icf0
406	ISYNC
407	RETURN
408
/* dcflush(va, n): flush (DCBF) the data cache lines covering [va, va+n); no-op if n <= 0 */
409TEXT	dcflush(SB), 1, $-4	/* dcflush(virtaddr, count) */
410	MOVW	n+4(FP), R4
411	RLWNM	$0, R3, $~(DCACHELINESZ-1), R5	/* R5 = va rounded down to line */
412	CMP	R4, $0
413	BLE	dcf1
414	SUB	R5, R3
415	ADD	R3, R4
416	ADD	$(DCACHELINESZ-1), R4
417	SRAW	$DCACHELINELOG, R4	/* R4 = number of lines (rounded up) */
418	MOVW	R4, CTR
419dcf0:	DCBF	(R5)
420	ADD	$DCACHELINESZ, R5
421	BDNZ	dcf0
422dcf1:
423	SYNC
424	RETURN
425
/*
 * tas32(p): atomic test-and-set via LWAR/STWCCC.
 * Stores 0xdead into *p if it was zero; returns the previous value
 * (0 means the lock was acquired).
 */
426TEXT	tas32(SB), 1, $0
427	SYNC
428	MOVW	R3, R4
429	MOVW	$0xdead,R5
430tas1:
431	DCBF	(R4)	/* fix for 603x bug */
432	LWAR	(R4), R3
433	CMP	R3, $0
434	BNE	tas0
435	DCBT	(R4)			/* fix 405 errata cpu_210 */
436	STWCCC	R5, (R4)
437	BNE	tas1
438tas0:
439	SYNC
440	ISYNC
441	RETURN
442
/*
 * ainc(p)/_xinc(p): atomically increment *p; returns the new value.
 * Deliberately faults (load through nil) if the result is <= 0 (overflow).
 * The _trap/_return labels are shared with adec below.
 */
443TEXT _xinc(SB), 1, $-4			/* long _xinc(long*); */
444TEXT ainc(SB), 1, $-4			/* long ainc(long*); */
445	MOVW	R3, R4
446_ainc:
447	DCBF	(R4)			/* fix for 603x bug */
448	LWAR	(R4), R3
449	ADD	$1, R3
450	DCBT	(R4)			/* fix 405 errata cpu_210 */
451	STWCCC	R3, (R4)
452	BNE	_ainc
453
454	CMP	R3, $0			/* overflow if -ve or 0 */
455	BGT	_return
456_trap:
457	MOVW	$0, R0
458	MOVW	(R0), R0		/* over under sideways down */
459_return:
460	RETURN
461
/*
 * adec(p)/_xdec(p): atomically decrement *p; returns the new value.
 * Branches to _trap (in ainc above) to fault if the result is negative.
 */
462TEXT _xdec(SB), 1, $-4			/* long _xdec(long*); */
463TEXT adec(SB), 1, $-4			/* long adec(long*); */
464	MOVW	R3, R4
465_adec:
466	DCBF	(R4)			/* fix for 603x bug */
467	LWAR	(R4), R3
468	ADD	$-1, R3
469	DCBT	(R4)			/* fix 405 errata cpu_210 */
470	STWCCC	R3, (R4)
471	BNE	_adec
472
473	CMP	R3, $0			/* underflow if -ve */
474	BLT	_trap
475
476	RETURN
477
/*
 * cas32(p, old, new): atomic compare-and-swap via LWAR/STWCCC.
 * Returns 1 if *p was old and has been replaced by new, 0 otherwise.
 */
478TEXT cas32(SB), 1, $0			/* int cas32(void*, u32int, u32int) */
479	MOVW	R3, R4			/* addr */
480	MOVW	old+4(FP), R5
481	MOVW	new+8(FP), R6
482	DCBF	(R4)			/* fix for 603x bug? */
483	LWAR	(R4), R3
484	CMP	R3, R5
485	BNE	 fail
486	DCBT	(R4)			/* fix 405 errata cpu_210 */
487	STWCCC	R6, (R4)
488	BNE	 fail
489	MOVW	 $1, R3
490	RETURN
491fail:
492	MOVW	 $0, R3
493	RETURN
494
/* accessors for the programmable interval timer and process-id SPRs */
495TEXT	getpit(SB), 1, $0
496	MOVW	SPR(SPR_PIT), R3
497	RETURN
498
499TEXT	putpit(SB), 1, $0
500	MOVW	R3, SPR(SPR_PIT)
501	RETURN
502
503TEXT	putpid(SB), 1, $0
504	ISYNC
505	MOVW	R3, SPR(SPR_PID)
506	ISYNC
507	RETURN
508
509TEXT	getpid(SB), 1, $0
510	MOVW	SPR(SPR_PID), R3
511	RETURN
512
513/* 405s have no PIR, so use low bits of PVR, which rae can set. */
514TEXT	getpir(SB), 1, $-4
515	MOVW	SPR(SPR_PVR), R3
516	ANDCC	$017, R3
517	RETURN
518
/* accessors for the time base, timer status/control and processor version SPRs */
519TEXT	gettbl(SB), 1, $0
520	MFTB(TBRL, 3)
521	RETURN
522
523TEXT	gettbu(SB), 1, $0
524	MFTB(TBRU, 3)
525	RETURN
526
527TEXT	gettsr(SB), 1, $0
528	MOVW	SPR(SPR_TSR), R3
529	RETURN
530
531TEXT	puttsr(SB), 1, $0
532	MOVW	R3, SPR(SPR_TSR)
533	RETURN
534
535TEXT	gettcr(SB), 1, $0
536	MOVW	SPR(SPR_TCR), R3
537	RETURN
538
539TEXT	puttcr(SB), 1, $0
540	MOVW	R3, SPR(SPR_TCR)
541	RETURN
542
543TEXT	getpvr(SB), 1, $0
544	MOVW	SPR(SPR_PVR), R3
545	RETURN
546
/* read and write the machine state register */
547TEXT	getmsr(SB), 1, $0
548	MOVW	MSR, R3
549	RETURN
550
551TEXT	putmsr(SB), 1, $0
552	SYNC
553	MOVW	R3, MSR
554	MSRSYNC
555	RETURN
556
/* NOTE(review): the 405 has no segment registers; this 6xx-style SEG write looks vestigial -- confirm callers */
557TEXT	putsr(SB), 1, $0
558	MOVW	4(FP), R4
559	MOVW	R4, SEG(R3)
560	RETURN
561
/* accessors for exception syndrome, vector prefix, core config and data error address SPRs */
562TEXT	getesr(SB), 1, $0
563	MOVW	SPR(SPR_ESR), R3
564	RETURN
565
566TEXT	putesr(SB), 1, $0
567	MOVW	R3, SPR(SPR_ESR)
568	ISYNC
569	RETURN
570
571TEXT	putevpr(SB), 1, $0
572	MOVW	R3, SPR(SPR_EVPR)
573	ISYNC
574	RETURN
575
576TEXT	getccr0(SB), 1, $-4
577	MOVW	SPR(SPR_CCR0), R3
578	RETURN
579
580TEXT	getdear(SB), 1, $0
581	MOVW	SPR(SPR_DEAR), R3
582	RETURN
583
/* C-callable memory/instruction barriers (see barrier flavours comment above) */
584TEXT	mbar(SB), 1, $-4
585TEXT	eieio(SB), 1, $-4
586	MBAR
587	RETURN
588
589TEXT	barriers(SB), 1, $-4
590TEXT	sync(SB), 1, $-4
591	SYNC
592	RETURN
593
594TEXT	isync(SB), 1, $-4
595	ISYNC
596	RETURN
597
/* tlbwrx(idx, hi, lo): write one TLB entry at index idx */
598TEXT	tlbwrx(SB), 1, $-4
599	MOVW	hi+4(FP), R5
600	MOVW	lo+8(FP), R6
601	SYNC
602	TLBWEHI(5, 3)
603	TLBWELO(6, 3)
604	ISYNC
605	SYNC			/* paranoia; inferno cerf405 port does this */
606	RETURN
607
/* tlbrehi(idx): read the TLBHI word of entry idx */
608TEXT	tlbrehi(SB), 1, $-4
609	TLBREHI(3, 3)
610	RETURN
611
/* tlbrelo(idx): read the TLBLO word of entry idx */
612TEXT	tlbrelo(SB), 1, $-4
613	TLBRELO(3, 3)
614	RETURN
615
/* tlbsxcc(va): search the TLB for va; returns the index, or -1 if not found */
616TEXT	tlbsxcc(SB), 1, $-4
617	TLBSXCC(0, 3, 3)
618	BEQ	tlbsxcc0
619	MOVW	$-1, R3	/* not found */
620tlbsxcc0:
621	RETURN
622
/* gotopc(pc): jump to pc; leaves the old LR in R31 for stack traces */
623TEXT	gotopc(SB), 1, $0
624	MOVW	R3, CTR
625	MOVW	LR, R31	/* for trace back */
626	BR	(CTR)
627
/*
 * dtlbmiss/itlbmiss: fast TLB-miss handlers.
 * Look the faulting page up in the software TLB (m->stlb); on a hit,
 * install a hardware TLB entry at a pseudo-random index and RFI;
 * on a miss, restore state and fall into the full trap path.
 */
628TEXT	dtlbmiss(SB), 1, $-4
629	MOVW	R3, SPR(SPR_SPRG1)
630	MOVW	$INT_DMISS, R3
631	MOVW	R3, SPR(SAVEXX)	/* value for cause if entry not in soft tlb */
632	MOVW	SPR(SPR_DEAR), R3
633	BR	tlbmiss
634
635TEXT	itlbmiss(SB), 1, $-4
636	MOVW	R3, SPR(SPR_SPRG1)
637	MOVW	$INT_IMISS, R3
638	MOVW	R3, SPR(SAVEXX)
639	MOVW	SPR(SPR_SRR0), R3
640
641tlbmiss:
642	/* R3 contains missed address */
643	RLWNM	$0, R3, $~(BY2PG-1), R3	/* just the page */
644	MOVW	R2, SPR(SPR_SPRG0)
645	MOVW	R4, SPR(SPR_SPRG2)
646	MOVW	R5, SPR(SPR_SPRG6W)
647	MOVW	R6, SPR(SPR_SPRG4W)
648	MOVW	CR, R6
649	MOVW	R6, SPR(SPR_SPRG5W)
	/* SB is needed to form the mach0 address; then R2 becomes the pre-MMU mach0 pointer */
650	MOVW	$setSB-KZERO(SB), R2
651	MOVW	$mach0-KZERO(SB), R2
652	MOVW	(6*4)(R2), R4	/* m->tlbfault++ */
653	ADD	$1, R4
654	MOVW	R4, (6*4)(R2)
655	MOVW	SPR(SPR_PID), R4
656	SRW	$12, R3, R6
657	RLWMI	$2, R4, $(0xFF<<2), R3	/* shift and insert PID for match */
658	XOR	R3, R6		/* hash=(va>>12)^(pid<<2); (assumes STLBSIZE is 10 to 12 bits) */
659	MOVW	(3*4)(R2), R5		/* m->pstlb == PADDR(m->stlb) */
660	RLWNM	$3, R6, $((STLBSIZE-1)<<3), R6	/* shift to index 8-byte entries, and mask */
661	MOVWU	(R6+R5), R4	/* fetch Softtlb.hi for comparison; updated address goes to R5 */
662	CMP	R4, R3
663	BNE	tlbtrap
	/* hit: use the time base as a cheap pseudo-random replacement index */
664	MFTB(TBRL, 6)
665	MOVW	(4*4)(R2), R4	/* m->utlbhi */
666	RLWNM	$0, R6, $(NTLB-1), R6		/* pseudo-random tlb index */
667	CMP	R6, R4
668	BLE	tlbm1
669	SUB	R4, R6
670tlbm1:
671	RLWNM	$0, R3, $~(BY2PG-1), R3
672	OR	$(TLB4K | TLBVALID), R3	/* make valid tlb hi */
673	TLBWEHI(3, 6)
674	MOVW	4(R5), R4	/* tlb lo; R3 is high */
675	TLBWELO(4, 6)
676	ISYNC
	/* restore scratch registers saved above and return from the exception */
677	MOVW	SPR(SPR_SPRG5R), R6
678	MOVW	R6, CR
679	MOVW	SPR(SPR_SPRG4R), R6
680	MOVW	SPR(SPR_SPRG6R), R5
681	MOVW	SPR(SPR_SPRG2), R4
682	MOVW	SPR(SPR_SPRG1), R3
683	MOVW	SPR(SPR_SPRG0), R2
684
685	ISYNC
686	SYNC			/* fixes 405 errata cpu_210 */
687	RFI
688
689tlbtrap:
	/* soft-tlb miss: restore state and enter the normal trap path with cause in SAVEXX */
690	MOVW	SPR(SPR_SPRG5R), R6
691	MOVW	R6, CR
692	MOVW	SPR(SPR_SPRG4R), R6
693	MOVW	SPR(SPR_SPRG6R), R5
694	MOVW	SPR(SPR_SPRG2), R4
695	MOVW	SPR(SPR_SPRG1), R3
696	MOVW	SPR(SPR_SPRG0), R2
697	MOVW	R0, SPR(SAVER0)
698	MOVW	LR, R0
699	MOVW	R0, SPR(SAVELR)
700	BR	trapcommon
701
702/*
703 * traps force memory mapping off.
704 * the following code has been executed at the exception
705 * vector location already:
706 *	MOVW R0, SPR(SAVER0)
707 *	MOVW LR, R0
708 *	MOVW R0, SPR(SAVELR)
709 *	bl	trapvec(SB)
710 */
711TEXT	trapvec(SB), 1, $-4
712	MOVW	LR, R0
713	RLWNM	$0, R0, $~0x1F, R0		/* mask LR increment to get ivoff */
714	MOVW	R0, SPR(SAVEXX)			/* save ivoff--interrupt vector offset */
715trapcommon:					/* entry point for critical interrupts */
716
717	MOVW	R1, SPR(SAVER1)			/* save stack pointer */
718	/* did we come from user space */
719	MOVW	SPR(SPR_SRR1), R0
720	MOVW	CR, R1
721	MOVW	R0, CR
722	BC	4,17,ktrap			/* if MSR[PR]=0, we are in kernel space */
723
724	/* switch to kernel stack */
725	MOVW	R1, CR
726	MOVW	SPR(SPR_SPRG7R), R1		/* PADDR(up->kstack+KSTACK-UREGSPACE), set in touser and sysrforkret */
727	BL	saveureg(SB)
728	MOVW	$mach0(SB), R(MACH)
729	MOVW	8(R(MACH)), R(USER)
730	BL	trap(SB)
731	BR	restoreureg
732
733ktrap:
734	MOVW	R1, CR
735	MOVW	SPR(SAVER1), R1
736	RLWNM	$0, R1, $~KZERO, R1		/* PADDR(R1) */
737	SUB	$UREGSPACE, R1	/* push onto current kernel stack */
738	BL	saveureg(SB)
739	BL	trap(SB)
740
/* restore user-visible state from the Ureg frame on R1 and return from the exception */
741restoreureg:
742	MOVMW	48(R1), R2	/* r2:r31 */
743	/* defer R1 */
744	MOVW	40(R1), R0
745	MOVW	R0, SPR(SAVER0)
746	MOVW	36(R1), R0
747	MOVW	R0, CTR
748	MOVW	32(R1), R0
749	MOVW	R0, XER
750	MOVW	28(R1), R0
751	MOVW	R0, CR	/* CR */
752	MOVW	24(R1), R0
753	MOVW	R0, LR
754	MOVW	20(R1), R0
755	MOVW	R0, SPR(SPR_SPRG7W)	/* PADDR(up->kstack etc.) for traps from user space */
756	MOVW	16(R1), R0
757	MOVW	R0, SPR(SPR_SRR0)	/* old PC */
758	MOVW	12(R1), R0
759	RLWNM	$0, R0, $~MSR_WE, R0	/* remove wait state */
760	MOVW	R0, SPR(SPR_SRR1)	/* old MSR */
761	/* cause, skip */
762	MOVW	44(R1), R1	/* old SP */
763	MOVW	SPR(SAVER0), R0
764	ISYNC
765	SYNC				/* fixes 405 errata cpu_210 */
766	RFI
767
768/*
769 * critical trap/interrupt.
770 * the only one we can take is machine check, synchronously, and
771 * outside any other trap handler.
772 * [MSR_ME not cleared => handler may be interrupted by machine check]
773 */
774TEXT	trapcritvec(SB), 1, $-4
775	MOVW	LR, R0
776	MOVW	R0, SPR(SAVEXX)
	/* copy the critical save/restore pair into SRR0/1 so trapcommon can treat it like a normal trap */
777	MOVW	SPR(SPR_SRR2), R0		/* PC or excepting insn */
778	MOVW	R0, SPR(SPR_SRR0)
779	MOVW	SPR(SPR_SRR3), R0		/* old MSR */
780	MOVW	R0, SPR(SPR_SRR1)
781	BR	trapcommon
782
783/*
784 * enter with stack set and mapped.
785 * on return, R0 is zero, SB (R2) has been set, and R3 has the Ureg*,
786 * the MMU has been re-enabled, kernel text and PC are in KSEG,
787 * Stack (R1), R(MACH) and R(USER) are set by caller, if required.
788 */
789TEXT	saveureg(SB), 1, $-4
790/*
791 * save state
792 */
793	MOVMW	R2, 48(R1)			/* save gprs r2 to r31 */
794	MOVW	$setSB(SB), R2
795	MOVW	SPR(SAVER1), R4
796	MOVW	R4, 44(R1)
797	MOVW	SPR(SAVER0), R5
798	MOVW	R5, 40(R1)
799	MOVW	CTR, R6
800	MOVW	R6, 36(R1)
801	MOVW	XER, R4
802	MOVW	R4, 32(R1)
803	MOVW	CR, R5
804	MOVW	R5, 28(R1)
805	MOVW	SPR(SAVELR), R6	/* LR */
806	MOVW	R6, 24(R1)
807	MOVW	SPR(SPR_SPRG7R), R6		/* PADDR(up->kstack+KSTACK-UREGSPACE) */
808	MOVW	R6, 20(R1)
809	MOVW	SPR(SPR_SRR0), R0
810	MOVW	R0, 16(R1)			/* PC of excepting insn (or next insn) */
811	MOVW	SPR(SPR_SRR1), R0
812	MOVW	R0, 12(R1)			/* old MSR */
813	MOVW	SPR(SAVEXX), R0
814	MOVW	R0, 8(R1)	/* cause/vector */
815	ADD	$8, R1, R3	/* Ureg* */
816	OR	$KZERO, R3	/* fix ureg */
817	DCBT	(R1)		/* fix 405 errata cpu_210 */
818	STWCCC	R3, (R1)	/* break any pending reservations */
819	MOVW	$0, R0	/* compiler/linker expect R0 to be zero */
820
	/* re-enable translation and return to the caller at its KZERO alias */
821	MOVW	MSR, R5
822	OR	$(MSR_IR|MSR_DR), R5	/* enable MMU */
823	MOVW	R5, SPR(SPR_SRR1)
824	MOVW	LR, R31
825	OR	$KZERO, R31	/* return PC in KSEG0 */
826	MOVW	R31, SPR(SPR_SRR0)
827	OR	$KZERO, R1	/* fix stack pointer */
828/*	ISYNC			/* no ISYNC here either */
829	SYNC			/* fix 405 errata cpu_210 */
830	RFI	/* returns to trap handler */
831
832/*
833 * restore state from Ureg and return from trap/interrupt
834 */
/* refresh the SPRG7 kstack value stored in the Ureg, then take the common return path */
835TEXT forkret(SB), 1, $-4
836	RLWNM	$0, R1, $~KZERO, R2	/* PADDR(up->kstack+KSTACK-UREGSPACE) */
837	MOVW	R2, 20(R1)	/* set in ureg */
838	BR	restoreureg
839
840/*
841 * 4xx specific
842 */
/* firmware: force a system reset by writing DBCR0's reset field, then spin */
843TEXT	firmware(SB), 1, $0
844	MOVW	$(3<<28), R3
845	MOVW	R3, SPR(SPR_DBCR0)	/* system reset */
846	BR	0(PC)
847