xref: /netbsd-src/common/lib/libc/atomic/atomic_init_testset.c (revision 5be0c2a9f68724826c47f16e1a040ebd89707519)
1 /*	$NetBSD: atomic_init_testset.c,v 1.19 2024/01/21 03:42:08 thorpej Exp $	*/
2 
3 /*-
4  * Copyright (c) 2008 The NetBSD Foundation, Inc.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice, this list of conditions and the following disclaimer.
12  * 2. Redistributions in binary form must reproduce the above copyright
13  *    notice, this list of conditions and the following disclaimer in the
14  *    documentation and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
17  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
18  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
20  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26  * POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 /*
30  * libc glue for atomic operations where the hardware does not provide
31  * compare-and-swap.  It's assumed that this will only be used on 32-bit
32  * platforms.
33  *
34  * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
35  * if using gcc.
36  */
37 
38 #include <sys/cdefs.h>
39 __RCSID("$NetBSD: atomic_init_testset.c,v 1.19 2024/01/21 03:42:08 thorpej Exp $");
40 
41 #include "extern.h"
42 #include "atomic_op_namespace.h"
43 
44 #include <sys/types.h>
45 #include <sys/atomic.h>
46 #include <sys/lock.h>
47 #include <sys/ras.h>
48 #include <sys/sysctl.h>
49 
50 #include <string.h>
51 
52 #define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
53 #define	I16	I2 I2 I2 I2 I2 I2 I2 I2
54 #define	I128	I16 I16 I16 I16 I16 I16 I16 I16
55 
56 static __cpu_simple_lock_t atomic_locks[128] = { I128 };
57 /*
58  * Pick a lock out of above array depending on the object address
59  * passed. Most variables used atomically will not be in the same
60  * cacheline - and if they are, using the same lock is fine.
61  */
62 #define HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)
63 
64 #ifdef	__HAVE_ASM_ATOMIC_CAS_UP
65 extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
66 #else
67 static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
68 #endif
69 static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
70     _atomic_cas_up;
71 RAS_DECL(_atomic_cas);
72 
73 #ifdef	__HAVE_ATOMIC_CAS_64_UP
74 #ifdef	__HAVE_ASM_ATOMIC_CAS_64_UP
75 extern uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
76 #else
77 static uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
78 #endif
79 static uint64_t (*_atomic_cas_64_fn)(volatile uint64_t *, uint64_t, uint64_t) =
80     _atomic_cas_64_up;
81 RAS_DECL(_atomic_cas_64);
82 #endif
83 
84 #ifdef	__HAVE_ASM_ATOMIC_CAS_16_UP
85 extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
86 #else
87 static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
88 #endif
89 static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
90     _atomic_cas_16_up;
91 RAS_DECL(_atomic_cas_16);
92 
93 #ifdef	__HAVE_ASM_ATOMIC_CAS_8_UP
94 extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
95 #else
96 static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
97 #endif
98 static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
99     _atomic_cas_8_up;
100 RAS_DECL(_atomic_cas_8);
101 
102 #ifndef	__HAVE_ASM_ATOMIC_CAS_UP
/*
 * _atomic_cas_up: 32-bit compare-and-swap for uniprocessor systems.
 *
 * The load/compare/store sequence bracketed by RAS_START/RAS_END is
 * registered with the kernel as a restartable atomic sequence (see the
 * rasctl() call in __libc_atomic_init below): if the thread is preempted
 * inside the sequence, it is restarted from the beginning, making the
 * whole read-modify-write effectively atomic on a single CPU.
 *
 * Returns the value previously in *ptr; *ptr is updated to 'new' only
 * when that previous value equals 'old'.  The early return on mismatch
 * exits the sequence before any store, so no partial update can occur.
 */
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
118 #endif
119 
120 #if defined(__HAVE_ATOMIC_CAS_64_UP) && !defined(__HAVE_ASM_ATOMIC_CAS_64_UP)
/*
 * _atomic_cas_64_up: 64-bit compare-and-swap for uniprocessor systems.
 *
 * Same restartable-atomic-sequence technique as the 32-bit variant:
 * the code between RAS_START and RAS_END is restarted by the kernel on
 * preemption (registered via rasctl() in __libc_atomic_init), so the
 * load/compare/store is atomic on a single CPU even though the 64-bit
 * access may take multiple instructions on a 32-bit platform.
 *
 * Returns the value previously in *ptr; stores 'new' only on match.
 */
static uint64_t
_atomic_cas_64_up(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	uint64_t ret;

	RAS_START(_atomic_cas_64);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_64);

	return ret;
}
136 #endif
137 
138 #ifndef	__HAVE_ASM_ATOMIC_CAS_16_UP
/*
 * _atomic_cas_16_up: 16-bit compare-and-swap for uniprocessor systems.
 *
 * The sequence between RAS_START and RAS_END is registered as a
 * restartable atomic sequence (rasctl() in __libc_atomic_init); the
 * kernel restarts it from the top if the thread is preempted inside,
 * making the load/compare/store atomic on a single CPU.
 *
 * Returns the value previously in *ptr; stores 'new' only on match.
 */
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
154 #endif
155 
156 #ifndef	__HAVE_ASM_ATOMIC_CAS_8_UP
/*
 * _atomic_cas_8_up: 8-bit compare-and-swap for uniprocessor systems.
 *
 * The sequence between RAS_START and RAS_END is registered as a
 * restartable atomic sequence (rasctl() in __libc_atomic_init); the
 * kernel restarts it from the top if the thread is preempted inside,
 * making the load/compare/store atomic on a single CPU.
 *
 * Returns the value previously in *ptr; stores 'new' only on match.
 */
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
172 #endif
173 
174 static uint32_t
_atomic_cas_mp(volatile uint32_t * ptr,uint32_t old,uint32_t new)175 _atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
176 {
177 	__cpu_simple_lock_t *lock;
178 	uint32_t ret;
179 
180 	lock = &atomic_locks[HASH(ptr)];
181 	__cpu_simple_lock(lock);
182 	ret = *ptr;
183 	if (__predict_true(ret == old)) {
184 		*ptr = new;
185 	}
186 	__cpu_simple_unlock(lock);
187 
188 	return ret;
189 }
190 
191 #ifdef	__HAVE_ATOMIC_CAS_64_UP
192 static uint64_t
_atomic_cas_64_mp(volatile uint64_t * ptr,uint64_t old,uint64_t new)193 _atomic_cas_64_mp(volatile uint64_t *ptr, uint64_t old, uint64_t new)
194 {
195 	__cpu_simple_lock_t *lock;
196 	uint64_t ret;
197 
198 	lock = &atomic_locks[HASH(ptr)];
199 	__cpu_simple_lock(lock);
200 	ret = *ptr;
201 	if (__predict_true(ret == old)) {
202 		*ptr = new;
203 	}
204 	__cpu_simple_unlock(lock);
205 
206 	return ret;
207 }
208 #endif
209 
210 static uint16_t
_atomic_cas_16_mp(volatile uint16_t * ptr,uint16_t old,uint16_t new)211 _atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
212 {
213 	__cpu_simple_lock_t *lock;
214 	uint16_t ret;
215 
216 	lock = &atomic_locks[HASH(ptr)];
217 	__cpu_simple_lock(lock);
218 	ret = *ptr;
219 	if (__predict_true(ret == old)) {
220 		*ptr = new;
221 	}
222 	__cpu_simple_unlock(lock);
223 
224 	return ret;
225 }
226 
227 static uint8_t
_atomic_cas_8_mp(volatile uint8_t * ptr,uint8_t old,uint8_t new)228 _atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
229 {
230 	__cpu_simple_lock_t *lock;
231 	uint8_t ret;
232 
233 	lock = &atomic_locks[HASH(ptr)];
234 	__cpu_simple_lock(lock);
235 	ret = *ptr;
236 	if (__predict_true(ret == old)) {
237 		*ptr = new;
238 	}
239 	__cpu_simple_unlock(lock);
240 
241 	return ret;
242 }
243 
/*
 * _atomic_cas_32: public 32-bit compare-and-swap entry point.
 * Dispatches through the function pointer selected by
 * __libc_atomic_init() (RAS-based UP flavor or spin-lock MP flavor).
 */
uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{

	return _atomic_cas_fn(ptr, old, new);
}
250 
251 #ifdef	__HAVE_ATOMIC_CAS_64_UP
252 uint64_t _atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);
253 
/*
 * _atomic_cas_64: public 64-bit compare-and-swap entry point.
 * Dispatches through the function pointer selected by
 * __libc_atomic_init() (RAS-based UP flavor or spin-lock MP flavor).
 */
uint64_t
_atomic_cas_64(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{

	return _atomic_cas_64_fn(ptr, old, new);
}
260 #endif
261 
/*
 * _atomic_cas_16: public 16-bit compare-and-swap entry point.
 * Dispatches through the function pointer selected by
 * __libc_atomic_init() (RAS-based UP flavor or spin-lock MP flavor).
 */
uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{

	return _atomic_cas_16_fn(ptr, old, new);
}
268 
269 uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);
270 
/*
 * _atomic_cas_8: public 8-bit compare-and-swap entry point.
 * Dispatches through the function pointer selected by
 * __libc_atomic_init() (RAS-based UP flavor or spin-lock MP flavor).
 */
uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{

	return _atomic_cas_8_fn(ptr, old, new);
}
277 
278 void __section(".text.startup") __attribute__ ((__visibility__("hidden")))
__libc_atomic_init(void)279 __libc_atomic_init(void)
280 {
281 	int ncpu, mib[2];
282 	size_t len;
283 
284 	_atomic_cas_fn = _atomic_cas_mp;
285 #ifdef	__HAVE_ATOMIC_CAS_64_UP
286 	_atomic_cas_64_fn = _atomic_cas_64_mp;
287 #endif
288 	_atomic_cas_16_fn = _atomic_cas_16_mp;
289 	_atomic_cas_8_fn = _atomic_cas_8_mp;
290 
291 	mib[0] = CTL_HW;
292 	mib[1] = HW_NCPU;
293 	len = sizeof(ncpu);
294 	if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
295 		return;
296 	if (ncpu > 1)
297 		return;
298 
299 	if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
300 	    RAS_INSTALL) == 0) {
301 		_atomic_cas_fn = _atomic_cas_up;
302 	}
303 
304 
305 #ifdef	__HAVE_ATOMIC_CAS_64_UP
306 	if (rasctl(RAS_ADDR(_atomic_cas_64), RAS_SIZE(_atomic_cas_64),
307 	    RAS_INSTALL) == 0) {
308 		_atomic_cas_64_fn = _atomic_cas_64_up;
309 	}
310 #endif
311 
312 	if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
313 	    RAS_INSTALL) == 0) {
314 		_atomic_cas_16_fn = _atomic_cas_16_up;
315 	}
316 
317 	if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
318 	    RAS_INSTALL) == 0) {
319 		_atomic_cas_8_fn = _atomic_cas_8_up;
320 	}
321 }
322 
323 #undef atomic_cas_32
324 #undef atomic_cas_uint
325 #undef atomic_cas_ulong
326 #undef atomic_cas_ptr
327 #undef atomic_cas_32_ni
328 #undef atomic_cas_uint_ni
329 #undef atomic_cas_ulong_ni
330 #undef atomic_cas_ptr_ni
331 
332 atomic_op_alias(atomic_cas_32,_atomic_cas_32)
333 atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
334 __strong_alias(_atomic_cas_uint,_atomic_cas_32)
335 atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
336 __strong_alias(_atomic_cas_ulong,_atomic_cas_32)
337 atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
338 __strong_alias(_atomic_cas_ptr,_atomic_cas_32)
339 
340 atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
341 __strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
342 atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
343 __strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
344 atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
345 __strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
346 atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
347 __strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)
348 
349 //atomic_op_alias(atomic_cas_16,_atomic_cas_16)
350 //atomic_op_alias(atomic_cas_16_ni,_atomic_cas_16)
351 //atomic_op_alias(atomic_cas_8,_atomic_cas_8)
352 //atomic_op_alias(atomic_cas_8_ni,_atomic_cas_8)
353 #ifdef	__HAVE_ATOMIC_CAS_64_UP
354 atomic_op_alias(atomic_cas_64_ni,_atomic_cas_64)
355 __strong_alias(_atomic_cas_64_ni,_atomic_cas_64)
356 crt_alias(__sync_val_compare_and_swap_8,_atomic_cas_64)
357 #endif
358 crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
359 crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
360 crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)
361