1 /* $OpenBSD: cpufunc.h,v 1.11 2023/01/25 09:53:53 kettenis Exp $ */
2
3 /*
4 * Copyright (c) 2020 Mark Kettenis <kettenis@openbsd.org>
5 *
6 * Permission to use, copy, modify, and distribute this software for any
7 * purpose with or without fee is hereby granted, provided that the above
8 * copyright notice and this permission notice appear in all copies.
9 *
10 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
11 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
12 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
13 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
14 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
15 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
16 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 */
18
19 #ifndef _MACHINE_CPUFUNC_H_
20 #define _MACHINE_CPUFUNC_H_
21
/*
 * eieio - Enforce In-order Execution of I/O: orders storage accesses,
 * mainly for memory-mapped device I/O.  "memory" clobber keeps the
 * compiler from reordering loads/stores across the barrier.
 */
static inline void
eieio(void)
{
	__asm volatile ("eieio" ::: "memory");
}
27
/*
 * isync - Instruction synchronize: discards prefetched instructions so
 * preceding context-altering operations (e.g. mtmsr, slbmte) take effect
 * before any subsequent instruction executes.
 */
static inline void
isync(void)
{
	__asm volatile ("isync" ::: "memory");
}
33
/*
 * ptesync - Page table entry synchronize: orders prior page-table stores
 * and tlbie operations; used when updating translation structures.
 */
static inline void
ptesync(void)
{
	__asm volatile ("ptesync" ::: "memory");
}
39
/*
 * sync - Heavyweight memory barrier: orders all prior storage accesses
 * with respect to all subsequent ones.
 */
static inline void
sync(void)
{
	__asm volatile ("sync" ::: "memory");
}
45
/*
 * slbia - Invalidate all SLB (Segment Lookaside Buffer) entries except
 * entry 0.  Caller is expected to provide any needed synchronization
 * (isync) around this.
 */
static inline void
slbia(void)
{
	__asm volatile ("slbia");
}
51
/*
 * slbie - Invalidate the SLB entry translating the given ESID
 * (effective segment ID).
 */
static inline void
slbie(uint64_t esid)
{
	__asm volatile ("slbie %0" :: "r"(esid));
}
57
/*
 * slbmfee - SLB Move From Entry ESID: read the ESID half of the SLB
 * entry at the given index.
 */
static inline uint64_t
slbmfee(uint64_t entry)
{
	uint64_t value;
	__asm volatile ("slbmfee %0, %1" : "=r"(value) : "r"(entry));
	return value;
}
65
/*
 * slbmte - SLB Move To Entry: install an SLB entry from the VSID data
 * (slbv) and the ESID/index data (slbe).
 */
static inline void
slbmte(uint64_t slbv, uint64_t slbe)
{
	__asm volatile ("slbmte %0, %1" :: "r"(slbv), "r"(slbe));
}
71
/*
 * tlbie - TLB invalidate entry, broadcast to all processors.  The second
 * (RS) operand is fixed at 0 here; ava selects the abbreviated virtual
 * address to invalidate.  Callers bracket with ptesync/tlbsync as needed.
 */
static inline void
tlbie(uint64_t ava)
{
	__asm volatile ("tlbie %0, %1" :: "r"(ava), "r"(0));
}
77
/*
 * tlbiel - TLB invalidate entry, local processor only (no broadcast).
 */
static inline void
tlbiel(uint64_t ava)
{
	__asm volatile ("tlbiel %0" :: "r"(ava));
}
83
/*
 * tlbsync - Wait for broadcast tlbie operations to complete on all
 * processors.
 */
static inline void
tlbsync(void)
{
	__asm volatile ("tlbsync" ::: "memory");
}
89
/*
 * mfmsr - Read the Machine State Register.
 */
static inline uint64_t
mfmsr(void)
{
	uint64_t value;
	__asm volatile ("mfmsr %0" : "=r"(value));
	return value;
}
97
/*
 * mtmsr - Write the Machine State Register.  Caller provides any
 * required context synchronization (isync).
 */
static inline void
mtmsr(uint64_t value)
{
	__asm volatile ("mtmsr %0" :: "r"(value));
}
103
/*
 * mftb - Read the Time Base; a single read suffices for the full 64-bit
 * value on 64-bit implementations.
 */
static inline uint64_t
mftb(void)
{
	uint64_t value;
	__asm volatile ("mftb %0" : "=r"(value));
	return value;
}
111
/*
 * mfdsisr - Read the DSISR (Data Storage Interrupt Status Register),
 * describing the cause of the most recent data storage exception.
 */
static inline uint32_t
mfdsisr(void)
{
	uint32_t value;
	__asm volatile ("mfdsisr %0" : "=r"(value));
	return value;
}
119
/*
 * mfdar - Read the DAR (Data Address Register): the effective address
 * associated with the most recent data storage exception.
 */
static inline uint64_t
mfdar(void)
{
	uint64_t value;
	__asm volatile ("mfdar %0" : "=r"(value));
	return value;
}
127
/*
 * mtdec - Write the Decrementer, which raises an interrupt when it
 * counts down past zero (used for the clock tick).
 */
static inline void
mtdec(uint32_t value)
{
	__asm volatile ("mtdec %0" :: "r"(value));
}
133
/*
 * mtsdr1 - Write SDR1 (Storage Description Register 1): the hashed
 * page table origin and size.
 */
static inline void
mtsdr1(uint64_t value)
{
	__asm volatile ("mtsdr1 %0" :: "r"(value));
}
139
/*
 * mtamr - Write the AMR (Authority Mask Register, SPR 29), controlling
 * storage-key based access permissions.
 */
static inline void
mtamr(uint64_t value)
{
	__asm volatile ("mtspr 29, %0" :: "r"(value));
}
145
/*
 * mtfscr - Write the FSCR (Facility Status and Control Register,
 * SPR 153), enabling/disabling optional processor facilities.
 */
static inline void
mtfscr(uint64_t value)
{
	__asm volatile ("mtspr 153, %0" :: "r"(value));
}
151
/*
 * mtuamor - Write the UAMOR (User Authority Mask Override Register,
 * SPR 157), restricting which AMR bits user mode may modify.
 */
static inline void
mtuamor(uint64_t value)
{
	__asm volatile ("mtspr 157, %0" :: "r"(value));
}
157
/*
 * mfpvr - Read the PVR (Processor Version Register, SPR 287),
 * identifying the CPU model and revision.
 */
static inline uint32_t
mfpvr(void)
{
	uint32_t value;
	__asm volatile ("mfspr %0, 287" : "=r"(value));
	return value;
}
165
/*
 * mflpcr - Read the LPCR (Logical Partitioning Control Register,
 * SPR 318).  See the LPCR_* bit definitions below.
 */
static inline uint64_t
mflpcr(void)
{
	uint64_t value;
	__asm volatile ("mfspr %0, 318" : "=r"(value));
	return value;
}
173
/*
 * mtlpcr - Write the LPCR (Logical Partitioning Control Register,
 * SPR 318).
 */
static inline void
mtlpcr(uint64_t value)
{
	__asm volatile ("mtspr 318, %0" :: "r"(value));
}
179
/* LPCR (Logical Partitioning Control Register) bits. */
#define LPCR_PECE 0x000040000001f000UL	/* power-save exit cause enable */
#define LPCR_LPES 0x0000000000000008UL	/* logical partitioning environment */
#define LPCR_HVICE 0x0000000000000002UL	/* HV virt. interrupt cond. enable */
183
/*
 * mtamor - Write the AMOR (Authority Mask Override Register, SPR 349),
 * restricting which AMR/IAMR bits a lower privilege level may modify.
 */
static inline void
mtamor(uint64_t value)
{
	__asm volatile ("mtspr 349, %0" :: "r"(value));
}
189
/*
 * mtptcr - Write the PTCR (Partition Table Control Register, SPR 464):
 * base address and size of the partition table.
 */
static inline void
mtptcr(uint64_t value)
{
	__asm volatile ("mtspr 464, %0" :: "r"(value));
}
195
/*
 * mfpmsr - Read the PMSR (Power Management Status Register, SPR 853).
 */
static inline uint64_t
mfpmsr(void)
{
	uint64_t value;
	__asm volatile ("mfspr %0, 853" : "=r"(value));
	return value;
}
203
/*
 * mtpmcr - Write the PMCR (Power Management Control Register, SPR 884).
 */
static inline void
mtpmcr(uint64_t value)
{
	__asm volatile ("mtspr 884, %0" :: "r"(value));
}
209
/*
 * mfpir - Read the PIR (Processor Identification Register, SPR 1023):
 * this processor's unique ID.
 */
static inline uint32_t
mfpir(void)
{
	uint32_t value;
	__asm volatile ("mfspr %0, 1023" : "=r"(value));
	return value;
}
217
/* CPU cache line size in bytes; defined elsewhere in the kernel. */
extern int cacheline_size;

/* Make range [addr, addr+len) coherent between D-cache and I-cache. */
void __syncicache(void *, size_t);
221
222 #endif /* _MACHINE_CPUFUNC_H_ */
223