/*
 * ppc440x5 `microboot': immediately after reset, initialise the machine,
 * notably TLB entries, sufficiently that we can get out of the last 4K of
 * memory.  these nonsense constraints appear to be specific to the 440x5.
 */
#include	"mem.h"

#define MB	(1024*1024)

#define SPR_PID		0x30		/* Process ID (not the same as 405) */
#define SPR_MMUCR	0x3B2		/* mmu control */
#define SPR_CCR0	0x3b3		/* Core Configuration Register 0 */
#define SPR_CCR1	0x378		/* core configuration register 1 */
#define SPR_SRR0	0x01a		/* Save/Restore Register 0 */
#define SPR_SRR1	0x01b		/* Save/Restore Register 1 */

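/*
 * the assembler has no mnemonics for these 440 cache operations, so they
 * are hand-assembled: primary opcode 31 with extended opcodes 966 (iccci),
 * 454 (dccci) and 598 (msync); a and b fill the RA and RB fields.
 */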
#define	ICCCI(a,b)	WORD	$((31<<26)|((a)<<16)|((b)<<11)|(966<<1))
#define	DCCCI(a,b)	WORD	$((31<<26)|((a)<<16)|((b)<<11)|(454<<1))
#define	MSYNC		WORD	$((31<<26)|(598<<1))

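/*
 * tlbwe is hand-assembled too (extended opcode 978): s names the register
 * holding the value, a the register holding the TLB index, and the
 * constant selects which of the entry's three words is written
 * (0 HI, 1 MD, 2 LO).
 */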
#define	TLBWELO(s,a)	WORD	$((31<<26)|((s)<<21)|((a)<<16)|(2<<11)|(978<<1))
#define	TLBWEMD(s,a)	WORD	$((31<<26)|((s)<<21)|((a)<<16)|(1<<11)|(978<<1))
#define	TLBWEHI(s,a)	WORD	$((31<<26)|((s)<<21)|((a)<<16)|(0<<11)|(978<<1))

	NOSCHED

	TEXT	_main(SB), 1, $-4
fakestart:
	CMP	R2, R3
	BEQ	lastword		/* force loading of last word */
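	/*
	 * never executed: a 4xx core begins fetching at 0xfffffffc after
	 * reset, which must hold the BR start at lastword below; this
	 * branch exists only to force loading of that last word.
	 */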
start:
	/* we can't issue a synchronising instr. until tlbs are all loaded */
	MOVW	$setSB(SB), R2

	MOVW	$0, R0
	DCCCI(0, 2) /* this flush invalidates the dcache of a 440 (must not be in use) */

	/* see l.s */
	MOVW	$((1<<30)|(0<<21)|(1<<15)|(0<<8)|(0<<2)), R3
	MOVW	R3, SPR(SPR_CCR0)
	MOVW	$(0<<7), R3	/* TCS=0 */
	MOVW	R3, SPR(SPR_CCR1)
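	/* TCS=0 should clock the time base from the CPU clock rather than the external timer clock */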

	/* allocate cache on store miss, disable U1 as transient, disable U2 as SWOA, no dcbf or icbi exception, tlbsx search 0 */
	MOVW	R0, SPR(SPR_MMUCR)
	ICCCI(0, 2) /* this flushes the icache of a 440; the errata reveals that EA is used; we'll use SB */

	MOVW	R0, CTR
	MOVW	R0, XER

	/* make following TLB entries shared, TID=PID=0 */
	MOVW	R0, SPR(SPR_PID)

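	/*
	 * each of the 64 tlb entries is written as three words: HI holds the
	 * effective page address, size and valid bit; MD the physical page
	 * address (identical here, so every mapping is V=R); LO the access
	 * permissions and storage attributes (the TLB* bits from mem.h).
	 * R5 carries each word, R3 the entry index, counting down from 63.
	 */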
	/* last two tlb entries cover 128K sram */
	MOVW	$63, R3
	MOVW	$(PHYSSRAM | TLB64K | TLBVALID), R5	/* TLBHI */
	TLBWEHI(5,3)
	MOVW	$(PHYSSRAM), R5				/* TLBMD */
	TLBWEMD(5,3)
	MOVW	$(TLBSR | TLBSX | TLBSW | TLBI), R5	/* TLBLO */
	TLBWELO(5,3)
	SUB	$1, R3

	MOVW	$(PHYSSRAM+(64*1024) | TLB64K | TLBVALID), R5	/* TLBHI */
	TLBWEHI(5,3)
	MOVW	$(PHYSSRAM+(64*1024)), R5		/* TLBMD */
	TLBWEMD(5,3)
	MOVW	$(TLBSR | TLBSX | TLBSW | TLBI), R5	/* TLBLO */
	TLBWELO(5,3)
	SUB	$1, R3

	/* cover DRAM in case we're going straight to kernel */
	MOVW	$(PHYSDRAM | TLB256MB | TLBVALID), R5	/* TLBHI */
	TLBWEHI(5,3)
	MOVW	$(PHYSDRAM), R5				/* TLBMD */
	TLBWEMD(5,3)
	MOVW	$(TLBSR | TLBSX | TLBSW | TLBW), R5	/* TLBLO */
	TLBWELO(5,3)
	SUB	$1, R3

	MOVW	$(PHYSDRAM+(256*MB) | TLB256MB | TLBVALID), R5	/* TLBHI */
	TLBWEHI(5,3)
	MOVW	$(PHYSDRAM+(256*MB)), R5		/* TLBMD */
	TLBWEMD(5,3)
	MOVW	$(TLBSR | TLBSX | TLBSW | TLBW), R5	/* TLBLO */
	TLBWELO(5,3)
	SUB	$1, R3
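	/* the two 256MB dram entries, unlike sram and mmio, are left cacheable (no TLBI); TLBW presumably asks for write-through */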

	/* and I/O registers too.  sigh. */
	MOVW	$(PHYSMMIO | TLB1MB | TLBVALID), R5	/* TLBHI */
	TLBWEHI(5,3)
	MOVW	$(PHYSMMIO), R5				/* TLBMD */
	TLBWEMD(5,3)
	MOVW	$(TLBSR | TLBSW | TLBI | TLBG), R5	/* TLBLO */
	TLBWELO(5,3)
	SUB	$1, R3
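	/* the mmio entry is cache-inhibited and guarded, and alone among these mappings is not executable (no TLBSX) */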

	/* invalidate the other TLB entries for now */
	MOVW	R0, R5
ztlb:
	/* can't use 0 (R0) as first operand */
	TLBWEHI(5,3)
	TLBWEMD(5,3)
	TLBWELO(5,3)
	SUB	$1, R3
	CMP	R3, $0
	BGE	ztlb
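	/* R3 counts down to 0, so each remaining entry is written with all-zero words, leaving TLBVALID clear */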

	/*
	 * we're currently relying on the shadow I/D TLBs.  to switch to
	 * the new TLBs, we need a synchronising instruction.
	 */
	MOVW	bootstart(SB), R3
	MOVW	R3, SPR(SPR_SRR0)
	MOVW	R0, SPR(SPR_SRR1)	/* new MSR */
	RFI
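	/*
	 * the RFI is that synchronising instruction: it resumes at SRR0
	 * (the word stored at bootstart) with MSR = SRR1 = 0, now running
	 * under the TLB entries just written.
	 */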

TEXT	bootstart(SB), 1, $-4
	WORD	$0xfffe2100
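	/* not code: the single data word loaded into SRR0 above, presumably the entry point of the next boot stage in the sram mapped earlier */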
lastword:
	/* this instruction must land at 0xfffffffc */
/* this jump works for addresses within 32MB of zero (1st & last 32MB) */
/*	WORD	$((18 << 26) | (0x03FFFFFC & 0xfffe2100) | 2) */
	BR	start