xref: /netbsd-src/sys/arch/riscv/include/sysreg.h (revision 9fb66d812c00ebfb445c0b47dea128f32aa6fe96)
1 /* $NetBSD: sysreg.h,v 1.11 2020/12/16 19:49:04 christos Exp $ */
2 
3 /*
4  * Copyright (c) 2014 The NetBSD Foundation, Inc.
5  * All rights reserved.
6  *
7  * This code is derived from software contributed to The NetBSD Foundation
8  * by Matt Thomas of 3am Software Foundry.
9  *
10  * Redistribution and use in source and binary forms, with or without
11  * modification, are permitted provided that the following conditions
12  * are met:
13  * 1. Redistributions of source code must retain the above copyright
14  *    notice, this list of conditions and the following disclaimer.
15  * 2. Redistributions in binary form must reproduce the above copyright
16  *    notice, this list of conditions and the following disclaimer in the
17  *    documentation and/or other materials provided with the distribution.
18  *
19  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
20  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
21  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29  * POSSIBILITY OF SUCH DAMAGE.
30  */
31 
32 #ifndef _RISCV_SYSREG_H_
33 #define _RISCV_SYSREG_H_
34 
35 #ifndef _KERNEL
36 #include <sys/param.h>
37 #endif
38 
/*
 * Floating-point Control and Status Register (fcsr) layout:
 * the rounding mode occupies bits 7:5 and the sticky accrued
 * exception flags occupy bits 4:0.
 */
#define FCSR_FMASK	0	// no exception bits
#define FCSR_FRM	__BITS(7,5)
#define FCSR_FRM_RNE	0b000	// Round Nearest, ties to Even
#define FCSR_FRM_RTZ	0b001	// Round Towards Zero
#define FCSR_FRM_RDN	0b010	// Round DowN (-infinity)
#define FCSR_FRM_RUP	0b011	// Round UP (+infinity)
#define FCSR_FRM_RMM	0b100	// Round to nearest, ties to Max Magnitude
#define FCSR_FRM_DYN	0b111	// Dynamic rounding
#define FCSR_FFLAGS	__BITS(4,0)	// Sticky bits
#define FCSR_NV		__BIT(4)	// iNValid operation
#define FCSR_DZ		__BIT(3)	// Divide by Zero
#define FCSR_OF		__BIT(2)	// OverFlow
#define FCSR_UF		__BIT(1)	// UnderFlow
#define FCSR_NX		__BIT(0)	// iNeXact
53 
/*
 * Read the floating-point control and status register (fcsr).
 *
 * The asm is volatile because the fcsr exception flags change as a
 * side effect of FP instructions the compiler cannot see; without it,
 * repeated reads could be coalesced or hoisted.  (Matches the
 * __volatile usage of the cycle/satp accessors below.)
 */
static inline uint32_t
riscvreg_fcsr_read(void)
{
	uint32_t __fcsr;
	__asm __volatile("frcsr %0" : "=r"(__fcsr));
	return __fcsr;
}
61 
62 
/*
 * Write the fcsr, returning the previous value.
 *
 * The asm must be volatile: the CSR write is a side effect, and
 * without volatile the compiler may delete the asm entirely when the
 * returned old value is unused.
 */
static inline uint32_t
riscvreg_fcsr_write(uint32_t __new)
{
	uint32_t __old;
	__asm __volatile("fscsr %0, %1" : "=r"(__old) : "r"(__new));
	return __old;
}
70 
71 static inline uint32_t
72 riscvreg_fcsr_read_fflags(void)
73 {
74 	uint32_t __old;
75 	__asm("frflags %0" : "=r"(__old));
76 	return __SHIFTOUT(__old, FCSR_FFLAGS);
77 }
78 
79 static inline uint32_t
80 riscvreg_fcsr_write_fflags(uint32_t __new)
81 {
82 	uint32_t __old;
83 	__new = __SHIFTIN(__new, FCSR_FFLAGS);
84 	__asm("fsflags %0, %1" : "=r"(__old) : "r"(__new));
85 	return __SHIFTOUT(__old, FCSR_FFLAGS);
86 }
87 
88 static inline uint32_t
89 riscvreg_fcsr_read_frm(void)
90 {
91 	uint32_t __old;
92 	__asm("frrm\t%0" : "=r"(__old));
93 	return __SHIFTOUT(__old, FCSR_FRM);
94 }
95 
96 static inline uint32_t
97 riscvreg_fcsr_write_frm(uint32_t __new)
98 {
99 	uint32_t __old;
100 	__new = __SHIFTIN(__new, FCSR_FRM);
101 	__asm volatile("fsrm\t%0, %1" : "=r"(__old) : "r"(__new));
102 	return __SHIFTOUT(__old, FCSR_FRM);
103 }
104 
/* Supervisor Status Register */
/*
 * SR_WPRI collects the reserved (WPRI) bits of sstatus.  The
 * expansion must be fully parenthesized: it is an |-chain, and an
 * unparenthesized use such as `x & SR_WPRI' or `~SR_WPRI' would bind
 * only the first term.
 */
#ifdef _LP64
#define SR_WPRI		(__BITS(62, 34) | __BITS(31,20) | __BIT(17) | \
			    __BITS(12,9) | __BITS(7,6) | __BITS(3,2))
#define SR_SD		__BIT(63)
			/* Bits 62-34 are WPRI */
#define SR_UXL		__BITS(33,32)
#define  SR_UXL_32	1
#define  SR_UXL_64	2
#define  SR_UXL_128	3
			/* Bits 31-20 are WPRI*/
#else
#define SR_WPRI		(__BITS(30,20) | __BIT(17) | __BITS(12,9) | \
			    __BITS(7,6) | __BITS(3,2))
#define SR_SD		__BIT(31)
			/* Bits 30-20 are WPRI*/
#endif /* _LP64 */
122 
/* Both RV32 and RV64 have the bottom 20 bits shared */
#define SR_MXR		__BIT(19)	// Make eXecutable Readable
#define SR_SUM		__BIT(18)	// Supervisor User Memory access
			/* Bit 17 is WPRI */
#define SR_XS		__BITS(16,15)	// extension state summary
#define SR_FS		__BITS(14,13)	// floating-point unit state
#define  SR_FS_OFF	0
#define  SR_FS_INITIAL	1
#define  SR_FS_CLEAN	2
#define  SR_FS_DIRTY	3

			/* Bits 12-9 are WPRI */
#define SR_SPP		__BIT(8)	// Supervisor Previous Privilege
			/* Bits 7-6 are WPRI */
#define SR_SPIE		__BIT(5)	// Supervisor Previous Intr Enable
#define SR_UPIE		__BIT(4)	// User Previous Intr Enable
			/* Bits 3-2 are WPRI */
#define SR_SIE		__BIT(1)	// Supervisor Interrupt Enable
#define SR_UIE		__BIT(0)	// User Interrupt Enable
142 
/* Supervisor interrupt registers */
/* ... interrupt pending register (sip) */
/* Naming: {S,U} = supervisor/user, {E,T,S}IP = external/timer/software */
			/* Bit (XLEN-1)-10 is WIRI */
#define SIP_SEIP	__BIT(9)
#define SIP_UEIP	__BIT(8)
			/* Bit 7-6 is WIRI */
#define SIP_STIP	__BIT(5)
#define SIP_UTIP	__BIT(4)
			/* Bit 3-2 is WIRI */
#define SIP_SSIP	__BIT(1)
#define SIP_USIP	__BIT(0)
154 
/* ... interrupt-enable register (sie) */
/* Naming: {S,U} = supervisor/user, {E,T,S}IE = external/timer/software */
			/* Bit (XLEN-1) - 10 is WIRI */
#define SIE_SEIE	__BIT(9)
#define SIE_UEIE	__BIT(8)
			/* Bit 7-6 is WIRI */
#define SIE_STIE	__BIT(5)
#define SIE_UTIE	__BIT(4)
			/* Bit 3-2 is WIRI */
#define SIE_SSIE	__BIT(1)
#define SIE_USIE	__BIT(0)

/* Mask for all interrupts (was the undefined SIE_SEI; fixed to SIE_SEIE) */
#define SIE_IM		(SIE_SEIE|SIE_UEIE|SIE_STIE|SIE_UTIE|SIE_SSIE|SIE_USIE)
168 
/*
 * Canned sstatus interrupt-enable values for user and kernel mode.
 * NOTE(review): the _LP64 and !_LP64 branches are currently identical
 * except for the extra SR_USER32 alias on _LP64 (presumably for
 * 32-bit processes on a 64-bit kernel — confirm against callers).
 */
#ifdef _LP64
#define	SR_USER		(SR_UIE)
#define	SR_USER32	(SR_USER)
#define	SR_KERNEL	(SR_SIE | SR_UIE)
#else
#define	SR_USER		(SR_UIE)
#define	SR_KERNEL	(SR_SIE | SR_UIE)
#endif
177 
/*
 * Read the supervisor status register (sstatus).
 *
 * Volatile so repeated reads are not coalesced: sstatus changes
 * behind the compiler's back (e.g. SR_FS going dirty, trap entry).
 *
 * NOTE(review): sstatus is XLEN bits wide; on RV64 the uint32_t
 * return truncates the upper bits (SR_SD, SR_UXL defined above) —
 * confirm callers only need the low 32 bits.
 */
static inline uint32_t
riscvreg_status_read(void)
{
	uint32_t __sr;
	__asm __volatile("csrr\t%0, sstatus" : "=r"(__sr));
	return __sr;
}
185 
/*
 * Atomically clear the bits in __mask in sstatus; returns the value
 * sstatus held before the clear.
 *
 * csrrci takes a 5-bit immediate, hence the constant-mask < 0x20
 * fast path; larger or non-constant masks go through a register.
 * The asms must be volatile: the CSR update is a side effect that
 * must survive even when the return value is ignored.
 */
static inline uint32_t
riscvreg_status_clear(uint32_t __mask)
{
	uint32_t __sr;
	if (__builtin_constant_p(__mask) && __mask < 0x20) {
		__asm __volatile("csrrci\t%0, sstatus, %1"
		    : "=r"(__sr) : "i"(__mask));
	} else {
		__asm __volatile("csrrc\t%0, sstatus, %1"
		    : "=r"(__sr) : "r"(__mask));
	}
	return __sr;
}
197 
/*
 * Atomically set the bits in __mask in sstatus; returns the value
 * sstatus held before the set.
 *
 * csrrsi takes a 5-bit immediate, hence the constant-mask < 0x20
 * fast path; larger or non-constant masks go through a register.
 * The asms must be volatile: the CSR update is a side effect that
 * must survive even when the return value is ignored.
 */
static inline uint32_t
riscvreg_status_set(uint32_t __mask)
{
	uint32_t __sr;
	if (__builtin_constant_p(__mask) && __mask < 0x20) {
		__asm __volatile("csrrsi\t%0, sstatus, %1"
		    : "=r"(__sr) : "i"(__mask));
	} else {
		__asm __volatile("csrrs\t%0, sstatus, %1"
		    : "=r"(__sr) : "r"(__mask));
	}
	return __sr;
}
209 
// Cause register
/*
 * Exception codes for the cause register when the interrupt bit is
 * clear; values match the RISC-V privileged specification.
 */
#define CAUSE_FETCH_MISALIGNED		0
#define CAUSE_FETCH_ACCESS		1
#define CAUSE_ILLEGAL_INSTRUCTION	2
#define CAUSE_BREAKPOINT		3
#define CAUSE_LOAD_MISALIGNED		4
#define CAUSE_LOAD_ACCESS		5
#define CAUSE_STORE_MISALIGNED		6
#define CAUSE_STORE_ACCESS		7
#define CAUSE_SYSCALL			8	// alias of CAUSE_USER_ECALL
#define CAUSE_USER_ECALL		8
#define CAUSE_SUPERVISOR_ECALL		9
/* 10 is reserved */
#define CAUSE_MACHINE_ECALL		11
#define CAUSE_FETCH_PAGE_FAULT		12
#define CAUSE_LOAD_PAGE_FAULT		13
/* 14 is Reserved */
#define CAUSE_STORE_PAGE_FAULT		15
/* >= 16 is reserved */
229 
/*
 * Read the 64-bit cycle counter.
 *
 * On RV64 a single csrr suffices.  On RV32 the counter is split
 * across the cycle/cycleh CSRs, so read high-low-high and retry until
 * the two high halves agree — otherwise a carry out of the low word
 * between the two reads could yield a torn value.
 */
static inline uint64_t
riscvreg_cycle_read(void)
{
#ifdef _LP64
	uint64_t __lo;
	__asm __volatile("csrr\t%0, cycle" : "=r"(__lo));
	return __lo;
#else
	uint32_t __hi0, __hi1, __lo0;
	do {
		__asm __volatile(
			"csrr\t%[__hi0], cycleh"
		"\n\t"	"csrr\t%[__lo0], cycle"
		"\n\t"	"csrr\t%[__hi1], cycleh"
		   :	[__hi0] "=r"(__hi0),
			[__lo0] "=r"(__lo0),
			[__hi1] "=r"(__hi1));
	} while (__hi0 != __hi1);	/* low word wrapped; retry */
	return ((uint64_t)__hi0 << 32) | (uint64_t)__lo0;
#endif
}
251 
/*
 * Supervisor address translation and protection (satp) register:
 * translation mode, address-space ID, and root page-table PPN.
 */
#ifdef _LP64
#define SATP_MODE		__BITS(63,60)
#define  SATP_MODE_SV39		8
#define  SATP_MODE_SV48		9
#define SATP_ASID		__BITS(59,44)
#define SATP_PPN		__BITS(43,0)
#else
#define SATP_MODE		__BIT(31)
#define  SATP_MODE_SV32		1
#define SATP_ASID		__BITS(30,22)
#define SATP_PPN		__BITS(21,0)
#endif
264 
265 static inline uint32_t
266 riscvreg_asid_read(void)
267 {
268 	uintptr_t satp;
269 	__asm __volatile("csrr	%0, satp" : "=r" (satp));
270 	return __SHIFTOUT(satp, SATP_ASID);
271 }
272 
273 static inline void
274 riscvreg_asid_write(uint32_t asid)
275 {
276 	uintptr_t satp;
277 	__asm __volatile("csrr	%0, satp" : "=r" (satp));
278 	satp &= ~SATP_ASID;
279 	satp |= __SHIFTIN((uintptr_t)asid, SATP_ASID);
280 	__asm __volatile("csrw	satp, %0" :: "r" (satp));
281 }
282 
283 #endif /* _RISCV_SYSREG_H_ */
284