xref: /netbsd-src/common/lib/libc/atomic/atomic_init_testset.c (revision 3f351f34c6d827cf017cdcff3543f6ec0c88b420)
1 /*	$NetBSD: atomic_init_testset.c,v 1.18 2024/01/19 19:33:49 christos Exp $	*/
2 
3 /*-
4  * Copyright (c) 2008 The NetBSD Foundation, Inc.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice, this list of conditions and the following disclaimer.
12  * 2. Redistributions in binary form must reproduce the above copyright
13  *    notice, this list of conditions and the following disclaimer in the
14  *    documentation and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
17  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
18  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
20  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26  * POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 /*
30  * libc glue for atomic operations where the hardware does not provide
31  * compare-and-swap.  It's assumed that this will only be used on 32-bit
32  * platforms.
33  *
34  * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
35  * if using gcc.
36  */
37 
38 #include <sys/cdefs.h>
39 __RCSID("$NetBSD: atomic_init_testset.c,v 1.18 2024/01/19 19:33:49 christos Exp $");
40 
41 #include "atomic_op_namespace.h"
42 
43 #include <sys/types.h>
44 #include <sys/atomic.h>
45 #include <sys/lock.h>
46 #include <sys/ras.h>
47 #include <sys/sysctl.h>
48 
49 #include <string.h>
50 
/*
 * Build a brace-initializer for 128 unlocked simple locks:
 * I2 expands to two __SIMPLELOCK_UNLOCKED entries (note the trailing
 * comma), I16 to sixteen, I128 to all 128.
 */
#define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define	I16	I2 I2 I2 I2 I2 I2 I2 I2
#define	I128	I16 I16 I16 I16 I16 I16 I16 I16

/* Hashed pool of spinlocks used by the MP compare-and-swap paths below. */
static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock out of above array depending on the object address
 * passed. Most variables used atomically will not be in the same
 * cacheline - and if they are, using the same lock is fine.
 * (>> 3 drops low address bits so nearby 8-byte-aligned objects hash
 * together; & 127 selects one of the 128 locks.)
 */
#define HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)
62 
/*
 * 32-bit CAS.  The uniprocessor (RAS-based) implementation is either
 * supplied in assembly (__HAVE_ASM_ATOMIC_CAS_UP) or by the C fallback
 * later in this file.  All callers go through the indirection pointer,
 * which starts out at the UP version and is re-pointed by
 * __libc_atomic_init() once the CPU count is known.
 */
#ifdef	__HAVE_ASM_ATOMIC_CAS_UP
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);	/* start/end labels for the restartable sequence */

/* 64-bit CAS: only on platforms that opt in via __HAVE_ATOMIC_CAS_64_UP. */
#ifdef	__HAVE_ATOMIC_CAS_64_UP
#ifdef	__HAVE_ASM_ATOMIC_CAS_64_UP
extern uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#else
static uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#endif
static uint64_t (*_atomic_cas_64_fn)(volatile uint64_t *, uint64_t, uint64_t) =
    _atomic_cas_64_up;
RAS_DECL(_atomic_cas_64);
#endif

/* 16-bit CAS: same indirection scheme as the 32-bit variant. */
#ifdef	__HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

/* 8-bit CAS: same indirection scheme as the 32-bit variant. */
#ifdef	__HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);
100 
#ifndef	__HAVE_ASM_ATOMIC_CAS_UP
/*
 * _atomic_cas_up: uniprocessor 32-bit compare-and-swap built as a
 * restartable atomic sequence (RAS).  The load/compare/store between
 * RAS_START and RAS_END is made effectively atomic on a single CPU by
 * the kernel restarting the sequence from the top if the thread is
 * preempted inside it (installed via rasctl() in __libc_atomic_init()).
 *
 * NOTE(review): the early return on mismatch deliberately leaves the
 * sequence before the store; statement order here is load-bearing, so
 * this file must be built without block reordering (see header comment).
 */
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
#endif
118 
#if defined(__HAVE_ATOMIC_CAS_64_UP) && !defined(__HAVE_ASM_ATOMIC_CAS_64_UP)
/*
 * _atomic_cas_64_up: uniprocessor 64-bit compare-and-swap; same RAS
 * pattern as _atomic_cas_up, using its own RAS region (_atomic_cas_64).
 * Statement order between RAS_START and RAS_END must not be disturbed.
 */
static uint64_t
_atomic_cas_64_up(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	uint64_t ret;

	RAS_START(_atomic_cas_64);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_64);

	return ret;
}
#endif
136 
#ifndef	__HAVE_ASM_ATOMIC_CAS_16_UP
/*
 * _atomic_cas_16_up: uniprocessor 16-bit compare-and-swap; same RAS
 * pattern as _atomic_cas_up, using its own RAS region (_atomic_cas_16).
 * Statement order between RAS_START and RAS_END must not be disturbed.
 */
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
#endif
154 
#ifndef	__HAVE_ASM_ATOMIC_CAS_8_UP
/*
 * _atomic_cas_8_up: uniprocessor 8-bit compare-and-swap; same RAS
 * pattern as _atomic_cas_up, using its own RAS region (_atomic_cas_8).
 * Statement order between RAS_START and RAS_END must not be disturbed.
 */
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
#endif
172 
173 static uint32_t
174 _atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
175 {
176 	__cpu_simple_lock_t *lock;
177 	uint32_t ret;
178 
179 	lock = &atomic_locks[HASH(ptr)];
180 	__cpu_simple_lock(lock);
181 	ret = *ptr;
182 	if (__predict_true(ret == old)) {
183 		*ptr = new;
184 	}
185 	__cpu_simple_unlock(lock);
186 
187 	return ret;
188 }
189 
#ifdef	__HAVE_ATOMIC_CAS_64_UP
/*
 * _atomic_cas_64_mp: multiprocessor-safe 64-bit compare-and-swap,
 * built on the same hashed-spinlock scheme as _atomic_cas_mp.
 */
static uint64_t
_atomic_cas_64_mp(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	__cpu_simple_lock_t *slot = &atomic_locks[HASH(ptr)];
	uint64_t seen;

	__cpu_simple_lock(slot);
	seen = *ptr;
	if (__predict_true(seen == old))
		*ptr = new;
	__cpu_simple_unlock(slot);

	return seen;
}
#endif
208 
209 static uint16_t
210 _atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
211 {
212 	__cpu_simple_lock_t *lock;
213 	uint16_t ret;
214 
215 	lock = &atomic_locks[HASH(ptr)];
216 	__cpu_simple_lock(lock);
217 	ret = *ptr;
218 	if (__predict_true(ret == old)) {
219 		*ptr = new;
220 	}
221 	__cpu_simple_unlock(lock);
222 
223 	return ret;
224 }
225 
226 static uint8_t
227 _atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
228 {
229 	__cpu_simple_lock_t *lock;
230 	uint8_t ret;
231 
232 	lock = &atomic_locks[HASH(ptr)];
233 	__cpu_simple_lock(lock);
234 	ret = *ptr;
235 	if (__predict_true(ret == old)) {
236 		*ptr = new;
237 	}
238 	__cpu_simple_unlock(lock);
239 
240 	return ret;
241 }
242 
/*
 * _atomic_cas_32: public 32-bit CAS entry point; dispatches through
 * whichever implementation __libc_atomic_init() selected.
 */
uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	return _atomic_cas_fn(ptr, old, new);
}
249 
#ifdef	__HAVE_ATOMIC_CAS_64_UP
uint64_t _atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);

/*
 * _atomic_cas_64: public 64-bit CAS entry point; dispatches through
 * whichever implementation __libc_atomic_init() selected.
 */
uint64_t
_atomic_cas_64(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	return _atomic_cas_64_fn(ptr, old, new);
}
#endif
260 
/*
 * _atomic_cas_16: public 16-bit CAS entry point; dispatches through
 * whichever implementation __libc_atomic_init() selected.
 */
uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	return _atomic_cas_16_fn(ptr, old, new);
}
267 
uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);

/*
 * _atomic_cas_8: public 8-bit CAS entry point; dispatches through
 * whichever implementation __libc_atomic_init() selected.
 */
uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	return _atomic_cas_8_fn(ptr, old, new);
}
276 
277 void __section(".text.startup") __attribute__ ((__visibility__("hidden")))
278 __libc_atomic_init(void)
279 {
280 	int ncpu, mib[2];
281 	size_t len;
282 
283 	_atomic_cas_fn = _atomic_cas_mp;
284 #ifdef	__HAVE_ATOMIC_CAS_64_UP
285 	_atomic_cas_64_fn = _atomic_cas_64_mp;
286 #endif
287 	_atomic_cas_16_fn = _atomic_cas_16_mp;
288 	_atomic_cas_8_fn = _atomic_cas_8_mp;
289 
290 	mib[0] = CTL_HW;
291 	mib[1] = HW_NCPU;
292 	len = sizeof(ncpu);
293 	if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
294 		return;
295 	if (ncpu > 1)
296 		return;
297 
298 	if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
299 	    RAS_INSTALL) == 0) {
300 		_atomic_cas_fn = _atomic_cas_up;
301 	}
302 
303 
304 #ifdef	__HAVE_ATOMIC_CAS_64_UP
305 	if (rasctl(RAS_ADDR(_atomic_cas_64), RAS_SIZE(_atomic_cas_64),
306 	    RAS_INSTALL) == 0) {
307 		_atomic_cas_64_fn = _atomic_cas_64_up;
308 	}
309 #endif
310 
311 	if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
312 	    RAS_INSTALL) == 0) {
313 		_atomic_cas_16_fn = _atomic_cas_16_up;
314 	}
315 
316 	if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
317 	    RAS_INSTALL) == 0) {
318 		_atomic_cas_8_fn = _atomic_cas_8_up;
319 	}
320 }
321 
/*
 * Strip the renaming macros from atomic_op_namespace.h so the alias
 * macros below can emit the real public symbol names.
 */
#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

/*
 * Export all the 32-bit-sized CAS names (uint, ulong, ptr — all 32-bit
 * here, per the header comment this file is for 32-bit platforms) as
 * aliases of _atomic_cas_32.
 */
atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

/*
 * The "_ni" (not-interrupt-safe) variants map to the same code; there
 * is no cheaper implementation to offer here.
 */
atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)

/* 16- and 8-bit public aliases are intentionally not exported (yet). */
//atomic_op_alias(atomic_cas_16,_atomic_cas_16)
//atomic_op_alias(atomic_cas_16_ni,_atomic_cas_16)
//atomic_op_alias(atomic_cas_8,_atomic_cas_8)
//atomic_op_alias(atomic_cas_8_ni,_atomic_cas_8)
#ifdef	__HAVE_ATOMIC_CAS_64_UP
atomic_op_alias(atomic_cas_64_ni,_atomic_cas_64)
__strong_alias(_atomic_cas_64_ni,_atomic_cas_64)
crt_alias(__sync_val_compare_and_swap_8,_atomic_cas_64)
#endif
/* GCC/Clang __sync builtins resolve to the corresponding CAS entries. */
crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)
360