1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21
22 /*
23 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
25 */
26
27 #ifndef _THR_INLINES_H
28 #define _THR_INLINES_H
29
30 #pragma ident "%Z%%M% %I% %E% SMI"
31
32 #if !defined(__lint) && defined(__GNUC__)
33
34 /* inlines for gcc */
35
/*
 * Return the current thread's ulwp_t pointer (curthread).
 * No validity check is performed; the thread pointer register /
 * segment must already have been initialized for this thread.
 */
extern __inline__ ulwp_t *
_curthread(void)
{
#if defined(__amd64)
	/* the ulwp_t self pointer is stored at offset 0 of the %fs segment */
	ulwp_t *__value;
	__asm__ __volatile__("movq %%fs:0, %0" : "=r" (__value));
#elif defined(__i386)
	/* 32-bit x86 keeps the thread segment in %gs rather than %fs */
	ulwp_t *__value;
	__asm__ __volatile__("movl %%gs:0, %0" : "=r" (__value));
#elif defined(__sparc)
	/* on sparc the ulwp_t pointer lives directly in global register %g7 */
	register ulwp_t *__value __asm__("g7");
#else
#error "port me"
#endif
	return (__value);
}
52
/*
 * Variant of _curthread() implemented as a single asm statement.
 * On x86 it reads the same segment slot as _curthread(); on sparc it
 * instead re-loads the pointer from memory at [%g7 + 80] (32-bit ld
 * on sparcv8, 64-bit ldx on sparcv9) rather than using %g7 directly.
 * NOTE(review): offset 80 presumably corresponds to a self-pointer
 * field within the ulwp_t -- confirm against the ulwp_t layout.
 * NOTE(review): the trailing "1:" label has no branch targeting it in
 * this asm body; it appears to be a kept branch target -- confirm
 * whether any external/historical code relies on it.
 */
extern __inline__ ulwp_t *
__curthread(void)
{
	ulwp_t *__value;
	__asm__ __volatile__(
#if defined(__amd64)
	    "movq %%fs:0, %0\n\t"
#elif defined(__i386)
	    "movl %%gs:0, %0\n\t"
#elif defined(__sparcv9)
	    /* declare %g7 as scratch so the assembler accepts its use */
	    ".register %%g7, #scratch\n\t"
	    "ldx [%%g7 + 80], %0\n\t"
#elif defined(__sparc)
	    ".register %%g7, #scratch\n\t"
	    "ld [%%g7 + 80], %0\n\t"
#else
#error "port me"
#endif
	    "1:"
	    : "=r" (__value)
	    : : "cc");
	return (__value);
}
76
/*
 * Return the caller's current stack pointer as a greg_t.
 * Implemented by binding a local variable to the stack pointer
 * register itself, so no instruction needs to be emitted.
 */
extern __inline__ greg_t
stkptr(void)
{
#if defined(__amd64)
	register greg_t __value __asm__("rsp");
#elif defined(__i386)
	register greg_t __value __asm__("esp");
#elif defined(__sparc)
	register greg_t __value __asm__("sp");
#else
#error "port me"
#endif
	return (__value);
}
91
/*
 * Return the high-resolution time via the kernel's gethrtime fast
 * trap (int $0xd2 with %eax = 3 on x86; "ta 0x24" on sparc).
 * As noted below, the fast trap does not preserve the caller-saved
 * registers, so they are all declared as clobbers.
 */
extern __inline__ hrtime_t
gethrtime(void) /* note: caller-saved registers are trashed */
{
#if defined(__amd64)
	/* 64-bit result comes back in %rax */
	hrtime_t __value;
	__asm__ __volatile__(
	    "movl $3, %%eax\n\t"
	    "int $0xd2"
	    : "=a" (__value)
	    : : "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11", "cc");
#elif defined(__i386)
	/* "=A" binds the 64-bit result to the %edx:%eax register pair */
	hrtime_t __value;
	__asm__ __volatile__(
	    "movl $3, %%eax\n\t"
	    "int $0xd2"
	    : "=A" (__value)
	    : : "ecx", "cc");
#elif defined(__sparcv9)
	/*
	 * The trap returns the high 32 bits in %o0 and the low 32 bits
	 * in %o1; combine them into a single 64-bit value in %o0.
	 */
	register hrtime_t __value __asm__("o0");
	__asm__ __volatile__(
	    "ta 0x24\n\t"
	    "sllx %%o0, 32, %0\n\t"
	    "or %%o1, %0, %0"
	    : "=r" (__value)
	    : : "o1", "o2", "o3", "o4", "o5", "cc");
#elif defined(__sparc)
	/* 32-bit sparc: the %o0/%o1 pair already forms the 64-bit result */
	register hrtime_t __value __asm__("o0");
	__asm__ __volatile__(
	    "ta 0x24"
	    : "=r" (__value)
	    : : "o2", "o3", "o4", "o5", "cc");
#else
#error "port me"
#endif
	return (__value);
}
128
/*
 * Atomically set the lock byte and return its previous value.
 * A zero return means the caller acquired the lock.  Note that the
 * "locked" value stored differs by platform: 1 on x86 (xchgb),
 * 0xFF on sparc (ldstub) -- callers must only test for non-zero.
 */
extern __inline__ int
set_lock_byte(volatile uint8_t *__lockp)
{
	int __value;
#if defined(__x86)
	/* xchg with a memory operand is implicitly locked on x86 */
	__asm__ __volatile__(
	    "movl $1, %0\n\t"
	    "xchgb %%dl, %1"
	    : "+d" (__value), "+m" (*__lockp));
#elif defined(__sparc)
	/* ldstub stores 0xFF and yields the old byte; membar orders loads */
	__asm__ __volatile__(
	    "ldstub %1, %0\n\t"
	    "membar #LoadLoad"
	    : "=r" (__value), "+m" (*__lockp));
#else
#error "port me"
#endif
	return (__value);
}
148
/*
 * Atomically store __value into *__memory and return the previous
 * contents of *__memory.
 */
extern __inline__ uint32_t
atomic_swap_32(volatile uint32_t *__memory, uint32_t __value)
{
#if defined(__x86)
	/* xchg with a memory operand is implicitly locked on x86 */
	__asm__ __volatile__(
	    "xchgl %0, %1"
	    : "+q" (__value), "+m" (*__memory));
	return (__value);
#elif defined(__sparc)
	/*
	 * cas loop: snapshot the old value, attempt to replace it with
	 * __value, and retry (carrying the observed value forward in the
	 * branch delay slot) until the compare-and-swap succeeds.
	 * On success __tmp2 holds the previous contents of *__memory.
	 */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "mov %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__value)
	    : "cc");
	return (__tmp2);
#else
#error "port me"
#endif
}
175
/*
 * Atomic compare-and-swap: if *__memory equals __cmp, store
 * __newvalue into it.  Always returns the value *__memory held
 * before the operation (== __cmp on success).
 */
extern __inline__ uint32_t
atomic_cas_32(volatile uint32_t *__memory, uint32_t __cmp, uint32_t __newvalue)
{
	uint32_t __oldvalue;
#if defined(__x86)
	/* cmpxchg implicitly compares against and writes back %eax */
	__asm__ __volatile__(
	    "lock; cmpxchgl %3, %0"
	    : "=m" (*__memory), "=a" (__oldvalue)
	    : "a" (__cmp), "r" (__newvalue));
#elif defined(__sparc)
	/* the "1" constraint ties __newvalue to the cas data register */
	__asm__ __volatile__(
	    "cas [%2], %3, %1"
	    : "=m" (*__memory), "=&r" (__oldvalue)
	    : "r" (__memory), "r" (__cmp), "1" (__newvalue));
#else
#error "port me"
#endif
	return (__oldvalue);
}
195
/*
 * Atomically increment *__memory by one.  No value is returned.
 */
extern __inline__ void
atomic_inc_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; incl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	/* cas loop: retry old -> old+1 until no other cpu intervenes */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "add %0, 1, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error "port me"
#endif
}
220
/*
 * Atomically decrement *__memory by one.  No value is returned.
 */
extern __inline__ void
atomic_dec_32(volatile uint32_t *__memory)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; decl %0"
	    : "+m" (*__memory));
#elif defined(__sparc)
	/* cas loop: retry old -> old-1 until no other cpu intervenes */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "sub %0, 1, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory)
	    : "cc");
#else
#error "port me"
#endif
}
245
/*
 * Atomically AND __bits into *__memory (clear the bits that are
 * zero in __bits).  No value is returned.
 */
extern __inline__ void
atomic_and_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; andl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	/* cas loop: retry old -> old & __bits until it sticks */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "and %0, %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error "port me"
#endif
}
271
/*
 * Atomically OR __bits into *__memory (set the bits that are one
 * in __bits).  No value is returned.
 */
extern __inline__ void
atomic_or_32(volatile uint32_t *__memory, uint32_t __bits)
{
#if defined(__x86)
	__asm__ __volatile__(
	    "lock; orl %1, %0"
	    : "+m" (*__memory)
	    : "r" (__bits));
#elif defined(__sparc)
	/* cas loop: retry old -> old | __bits until it sticks */
	uint32_t __tmp1, __tmp2;
	__asm__ __volatile__(
	    "ld [%3], %0\n\t"
	    "1:\n\t"
	    "or %0, %4, %1\n\t"
	    "cas [%3], %0, %1\n\t"
	    "cmp %0, %1\n\t"
	    "bne,a,pn %%icc, 1b\n\t"
	    "  mov %1, %0"
	    : "=&r" (__tmp1), "=&r" (__tmp2), "=m" (*__memory)
	    : "r" (__memory), "r" (__bits)
	    : "cc");
#else
#error "port me"
#endif
}
297
298 #if defined(__sparc) /* only needed on sparc */
299
/*
 * Return the contents of %i7 -- by sparc convention the address of
 * the call instruction that invoked the current register window's
 * function -- as the caller's address.
 */
extern __inline__ ulong_t
caller(void)
{
	register ulong_t __value __asm__("i7");
	return (__value);
}
306
/*
 * Return the current frame pointer (%fp) as an integer.
 */
extern __inline__ ulong_t
getfp(void)
{
	register ulong_t __value __asm__("fp");
	return (__value);
}
313
314 #endif /* __sparc */
315
316 #if defined(__x86) /* only needed on x86 */
317
/*
 * Spin-loop hint for hyper-threaded cpus.  "rep; nop" encodes the
 * x86 PAUSE instruction (F3 90), which is a plain nop on older
 * processors that predate the mnemonic.
 */
extern __inline__ void
ht_pause(void)
{
	__asm__ __volatile__("rep; nop");
}
323
324 #endif /* __x86 */
325
326 #endif /* !__lint && __GNUC__ */
327
328 #endif /* _THR_INLINES_H */
329