/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef _STDATOMIC_H_
#define _STDATOMIC_H_

#include <sys/cdefs.h>
#include <sys/_types.h>

#if (__has_extension(c_atomic) || __has_extension(cxx_atomic)) && \
    defined(__clang__)
#define __CLANG_ATOMICS
#elif __GNUC_PREREQ__(4, 7)
#define __GNUC_ATOMICS
#elif defined(__GNUC__)
#define __SYNC_ATOMICS
#else
#error "stdatomic.h does not support your compiler"
#endif

/*
 * 7.17.1 Atomic lock-free macros.
 */

#ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_BOOL_LOCK_FREE		__GCC_ATOMIC_BOOL_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE		__GCC_ATOMIC_CHAR_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE	__GCC_ATOMIC_CHAR16_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE	__GCC_ATOMIC_CHAR32_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE	__GCC_ATOMIC_WCHAR_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE		__GCC_ATOMIC_SHORT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE		__GCC_ATOMIC_INT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE		__GCC_ATOMIC_LONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE		__GCC_ATOMIC_LLONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE	__GCC_ATOMIC_POINTER_LOCK_FREE
#endif

/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define ATOMIC_VAR_INIT(value)		(value)
#define atomic_init(obj, value)		__c11_atomic_init(obj, value)
#else
#define ATOMIC_VAR_INIT(value)		{ .__val = (value) }
#define atomic_init(obj, value)		((void)((obj)->__val = (value)))
#endif
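
/*
 * Illustrative sketch (not part of the original header): how the two
 * initialization forms above are typically used.  ATOMIC_VAR_INIT is for
 * objects with static storage duration; atomic_init() performs non-atomic
 * initialization of an already-allocated object.  The names below are
 * hypothetical, atomic_int is only declared further down (7.17.6), and
 * the whole block is compiled out.
 */
#if 0	/* example only */
static atomic_int __example_static_counter = ATOMIC_VAR_INIT(0);

static __inline void
__example_dynamic_init(atomic_int *__counter)
{

	/* Non-atomic initialization; must happen before concurrent use. */
	atomic_init(__counter, 0);
}
#endif	/* example only */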

/*
 * Clang and recent GCC both provide predefined macros for the memory
 * orderings.  If we are using a compiler that doesn't define them, use the
 * clang values - these will be ignored in the fallback path.
 */

#ifndef __ATOMIC_RELAXED
#define __ATOMIC_RELAXED		0
#endif
#ifndef __ATOMIC_CONSUME
#define __ATOMIC_CONSUME		1
#endif
#ifndef __ATOMIC_ACQUIRE
#define __ATOMIC_ACQUIRE		2
#endif
#ifndef __ATOMIC_RELEASE
#define __ATOMIC_RELEASE		3
#endif
#ifndef __ATOMIC_ACQ_REL
#define __ATOMIC_ACQ_REL		4
#endif
#ifndef __ATOMIC_SEQ_CST
#define __ATOMIC_SEQ_CST		5
#endif

/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants that denote the barrier behaviour of the
 * atomic operations.
 */

typedef enum {
	memory_order_relaxed = __ATOMIC_RELAXED,
	memory_order_consume = __ATOMIC_CONSUME,
	memory_order_acquire = __ATOMIC_ACQUIRE,
	memory_order_release = __ATOMIC_RELEASE,
	memory_order_acq_rel = __ATOMIC_ACQ_REL,
	memory_order_seq_cst = __ATOMIC_SEQ_CST
} memory_order;

/*
 * 7.17.4 Fences.
 */

static __inline void
atomic_thread_fence(memory_order __order __unused)
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_thread_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_thread_fence(__order);
#else
	__sync_synchronize();
#endif
}

static __inline void
atomic_signal_fence(memory_order __order __unused)
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_signal_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_signal_fence(__order);
#else
	__asm volatile ("" ::: "memory");
#endif
}

#if defined(__cplusplus) && !defined(_Bool)
#define _Bool	bool
#define __bool_locally_defined
#endif

/*
 * 7.17.5 Lock-free property.
 */

#if defined(_KERNEL)
/* Atomics in kernelspace are always lock-free. */
#define atomic_is_lock_free(obj) \
	((void)(obj), (_Bool)1)
#elif defined(__CLANG_ATOMICS) || defined(__GNUC_ATOMICS)
#define atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof(*(obj)), obj)
#else
#define atomic_is_lock_free(obj) \
	((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
#endif
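
/*
 * Illustrative sketch (not part of the original header): a typical use of
 * atomic_thread_fence().  A release fence followed by a relaxed store
 * publishes data in the same way a store-release would, provided the
 * reader pairs a relaxed load with an acquire fence (or an acquire load).
 * The names are hypothetical, some of them are only declared later in
 * this header, and the block is compiled out.
 */
#if 0	/* example only */
static int	__example_payload;
static atomic_int __example_ready;

static __inline void
__example_publish(int __value)
{

	__example_payload = __value;
	/* Order the payload write before the flag store below. */
	atomic_thread_fence(memory_order_release);
	atomic_store_explicit(&__example_ready, 1, memory_order_relaxed);
}
#endif	/* example only */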

/*
 * 7.17.6 Atomic integer types.
 */

typedef _Atomic(_Bool)			atomic_bool;
typedef _Atomic(char)			atomic_char;
typedef _Atomic(signed char)		atomic_schar;
typedef _Atomic(unsigned char)		atomic_uchar;
typedef _Atomic(short)			atomic_short;
typedef _Atomic(unsigned short)		atomic_ushort;
typedef _Atomic(int)			atomic_int;
typedef _Atomic(unsigned int)		atomic_uint;
typedef _Atomic(long)			atomic_long;
typedef _Atomic(unsigned long)		atomic_ulong;
typedef _Atomic(long long)		atomic_llong;
typedef _Atomic(unsigned long long)	atomic_ullong;
typedef _Atomic(__char16_t)		atomic_char16_t;
typedef _Atomic(__char32_t)		atomic_char32_t;
typedef _Atomic(___wchar_t)		atomic_wchar_t;
typedef _Atomic(__int_least8_t)		atomic_int_least8_t;
typedef _Atomic(__uint_least8_t)	atomic_uint_least8_t;
typedef _Atomic(__int_least16_t)	atomic_int_least16_t;
typedef _Atomic(__uint_least16_t)	atomic_uint_least16_t;
typedef _Atomic(__int_least32_t)	atomic_int_least32_t;
typedef _Atomic(__uint_least32_t)	atomic_uint_least32_t;
typedef _Atomic(__int_least64_t)	atomic_int_least64_t;
typedef _Atomic(__uint_least64_t)	atomic_uint_least64_t;
typedef _Atomic(__int_fast8_t)		atomic_int_fast8_t;
typedef _Atomic(__uint_fast8_t)		atomic_uint_fast8_t;
typedef _Atomic(__int_fast16_t)		atomic_int_fast16_t;
typedef _Atomic(__uint_fast16_t)	atomic_uint_fast16_t;
typedef _Atomic(__int_fast32_t)		atomic_int_fast32_t;
typedef _Atomic(__uint_fast32_t)	atomic_uint_fast32_t;
typedef _Atomic(__int_fast64_t)		atomic_int_fast64_t;
typedef _Atomic(__uint_fast64_t)	atomic_uint_fast64_t;
typedef _Atomic(__intptr_t)		atomic_intptr_t;
typedef _Atomic(__uintptr_t)		atomic_uintptr_t;
typedef _Atomic(__size_t)		atomic_size_t;
typedef _Atomic(__ptrdiff_t)		atomic_ptrdiff_t;
typedef _Atomic(__intmax_t)		atomic_intmax_t;
typedef _Atomic(__uintmax_t)		atomic_uintmax_t;

/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 */

#if defined(__CLANG_ATOMICS)
#define atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) \
	__c11_atomic_compare_exchange_strong(object, expected, desired, \
	    success, failure)
#define atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
	__c11_atomic_compare_exchange_weak(object, expected, desired, \
	    success, failure)
#define atomic_exchange_explicit(object, desired, order) \
	__c11_atomic_exchange(object, desired, order)
#define atomic_fetch_add_explicit(object, operand, order) \
	__c11_atomic_fetch_add(object, operand, order)
#define atomic_fetch_and_explicit(object, operand, order) \
	__c11_atomic_fetch_and(object, operand, order)
#define atomic_fetch_or_explicit(object, operand, order) \
	__c11_atomic_fetch_or(object, operand, order)
#define atomic_fetch_sub_explicit(object, operand, order) \
	__c11_atomic_fetch_sub(object, operand, order)
#define atomic_fetch_xor_explicit(object, operand, order) \
	__c11_atomic_fetch_xor(object, operand, order)
#define atomic_load_explicit(object, order) \
	__c11_atomic_load(object, order)
#define atomic_store_explicit(object, desired, order) \
	__c11_atomic_store(object, desired, order)
#elif defined(__GNUC_ATOMICS)
#define atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) \
	__atomic_compare_exchange_n(object, expected, \
	    desired, 0, success, failure)
#define atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
	__atomic_compare_exchange_n(object, expected, \
	    desired, 1, success, failure)
#define atomic_exchange_explicit(object, desired, order) \
	__atomic_exchange_n(object, desired, order)
#define atomic_fetch_add_explicit(object, operand, order) \
	__atomic_fetch_add(object, operand, order)
#define atomic_fetch_and_explicit(object, operand, order) \
	__atomic_fetch_and(object, operand, order)
#define atomic_fetch_or_explicit(object, operand, order) \
	__atomic_fetch_or(object, operand, order)
#define atomic_fetch_sub_explicit(object, operand, order) \
	__atomic_fetch_sub(object, operand, order)
#define atomic_fetch_xor_explicit(object, operand, order) \
	__atomic_fetch_xor(object, operand, order)
#define atomic_load_explicit(object, order) \
	__atomic_load_n(object, order)
#define atomic_store_explicit(object, desired, order) \
	__atomic_store_n(object, desired, order)
#else
#define __atomic_apply_stride(object, operand) \
	(((__typeof__((object)->__val))0) + (operand))
#define atomic_compare_exchange_strong_explicit(object, expected, \
    desired, success, failure) __extension__ ({ \
	__typeof__(expected) __ep = (expected); \
	__typeof__(*__ep) __e = *__ep; \
	(void)(success); (void)(failure); \
	(_Bool)((*__ep = __sync_val_compare_and_swap(&(object)->__val, \
	    __e, desired)) == __e); \
})
#define atomic_compare_exchange_weak_explicit(object, expected, \
    desired, success, failure) \
	atomic_compare_exchange_strong_explicit(object, expected, \
	    desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define atomic_exchange_explicit(object, desired, order) \
	((void)(order), __sync_swap(&(object)->__val, desired))
#else
/*
 * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
 * practice it is usually a full barrier) so we need an explicit barrier before
 * it.
 */
#define atomic_exchange_explicit(object, desired, order) \
__extension__ ({ \
	__typeof__(object) __o = (object); \
	__typeof__(desired) __d = (desired); \
	(void)(order); \
	__sync_synchronize(); \
	__sync_lock_test_and_set(&(__o)->__val, __d); \
})
#endif
#define atomic_fetch_add_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_add(&(object)->__val, \
	    __atomic_apply_stride(object, operand)))
#define atomic_fetch_and_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_and(&(object)->__val, operand))
#define atomic_fetch_or_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_or(&(object)->__val, operand))
#define atomic_fetch_sub_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_sub(&(object)->__val, \
	    __atomic_apply_stride(object, operand)))
#define atomic_fetch_xor_explicit(object, operand, order) \
	((void)(order), __sync_fetch_and_xor(&(object)->__val, operand))
#define atomic_load_explicit(object, order) \
	((void)(order), __sync_fetch_and_add(&(object)->__val, 0))
#define atomic_store_explicit(object, desired, order) \
	((void)atomic_exchange_explicit(object, desired, order))
#endif
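
/*
 * Illustrative sketch (not part of the original header): the usual retry
 * loop around atomic_compare_exchange_weak_explicit().  On failure the
 * operation stores the current value back into *expected, so the loop
 * simply re-tests and retries.  The names are hypothetical and the block
 * is compiled out.
 */
#if 0	/* example only */
static __inline void
__example_fetch_max(atomic_int *__object, int __value)
{
	int __cur;

	__cur = atomic_load_explicit(__object, memory_order_relaxed);
	while (__cur < __value &&
	    !atomic_compare_exchange_weak_explicit(__object, &__cur, __value,
	    memory_order_acq_rel, memory_order_relaxed))
		continue;
}
#endif	/* example only */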

/*
 * Convenience functions.
 *
 * Don't provide these in kernel space. In kernel space, we should be
 * disciplined enough to always provide explicit barriers.
 */

#ifndef _KERNEL
#define atomic_compare_exchange_strong(object, expected, desired) \
	atomic_compare_exchange_strong_explicit(object, expected, \
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_compare_exchange_weak(object, expected, desired) \
	atomic_compare_exchange_weak_explicit(object, expected, \
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define atomic_exchange(object, desired) \
	atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define atomic_fetch_add(object, operand) \
	atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_and(object, operand) \
	atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_or(object, operand) \
	atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_sub(object, operand) \
	atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define atomic_fetch_xor(object, operand) \
	atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define atomic_load(object) \
	atomic_load_explicit(object, memory_order_seq_cst)
#define atomic_store(object, desired) \
	atomic_store_explicit(object, desired, memory_order_seq_cst)
#endif /* !_KERNEL */
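
/*
 * Illustrative sketch (not part of the original header): the convenience
 * macros above default every operation to memory_order_seq_cst, so a
 * shared counter needs no explicit ordering arguments.  The names are
 * hypothetical and the block is compiled out.
 */
#if 0	/* example only */
static atomic_uint __example_hits;

static __inline unsigned int
__example_count_hit(void)
{

	/* Returns the value the counter held before this increment. */
	return (atomic_fetch_add(&__example_hits, 1));
}
#endif	/* example only */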

/*
 * 7.17.8 Atomic flag type and operations.
 *
 * XXX: Assume atomic_bool can be used as an atomic_flag. Is there some
 * kind of compiler built-in type we could use?
 */

typedef struct {
	atomic_bool	__flag;
} atomic_flag;
#define ATOMIC_FLAG_INIT		{ ATOMIC_VAR_INIT(0) }

static __inline _Bool
atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
    memory_order __order)
{
	return (atomic_exchange_explicit(&__object->__flag, 1, __order));
}

static __inline void
atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
{

	atomic_store_explicit(&__object->__flag, 0, __order);
}

#ifndef _KERNEL
static __inline _Bool
atomic_flag_test_and_set(volatile atomic_flag *__object)
{

	return (atomic_flag_test_and_set_explicit(__object,
	    memory_order_seq_cst));
}

static __inline void
atomic_flag_clear(volatile atomic_flag *__object)
{

	atomic_flag_clear_explicit(__object, memory_order_seq_cst);
}
#endif /* !_KERNEL */

#ifdef __bool_locally_defined
#undef _Bool
#undef __bool_locally_defined
#endif

#endif /* !_STDATOMIC_H_ */
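
/*
 * Illustrative sketch (not part of the original header, kept after the
 * include guard because it is never compiled): the classic test-and-set
 * spinlock built from atomic_flag.  The names are hypothetical.
 */
#if 0	/* example only */
static __inline void
__example_spin_lock(volatile atomic_flag *__lock)
{

	/* Spin until the previous value was clear, acquiring the lock. */
	while (atomic_flag_test_and_set_explicit(__lock, memory_order_acquire))
		continue;
}

static __inline void
__example_spin_unlock(volatile atomic_flag *__lock)
{

	atomic_flag_clear_explicit(__lock, memory_order_release);
}
#endif	/* example only */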