//===-- atomic.c - Implement support functions for atomic operations.------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  atomic.c defines a set of functions for performing atomic accesses on
//  arbitrary-sized memory locations.  This design uses locks that should
//  be fast in the uncontended case, for two reasons:
//
//  1) This code must work with C programs that do not link to anything
//     (including pthreads) and so it should not depend on any pthread
//     functions. If the user wishes to opt into using pthreads, they may do so.
//  2) Atomic operations, rather than explicit mutexes, are most commonly used
//     in code where contended operations are rare.
//
//  To avoid needing a per-object lock, this code allocates an array of
//  locks and hashes the object pointers to find the one that it should use.
//  For operations that must be atomic on two locations, the lower lock is
//  always acquired first, to avoid deadlock.
//
//===----------------------------------------------------------------------===//
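//
//  As an illustration (a hypothetical caller-side sketch, not part of this
//  library's interface documentation): when the compiler cannot lower an
//  atomic access to a lock-free instruction sequence, it emits a call to the
//  generic entry points defined below, e.g.
//
//    #include <stdatomic.h>
//    typedef struct { uint64_t a, b, c; } Big; // 24 bytes: never lock-free
//    _Atomic Big obj;
//    Big copy = atomic_load(&obj);
//    // lowers to: __atomic_load(sizeof(Big), &obj, &copy, __ATOMIC_SEQ_CST)
//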

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#include "assembly.h"

// We use __builtin_mem* here to avoid dependencies on libc-provided headers.
#define memcpy __builtin_memcpy
#define memcmp __builtin_memcmp

// Clang objects if you redefine a builtin.  This little hack allows us to
// define a function with the same name as an intrinsic.
#pragma redefine_extname __atomic_load_c SYMBOL_NAME(__atomic_load)
#pragma redefine_extname __atomic_store_c SYMBOL_NAME(__atomic_store)
#pragma redefine_extname __atomic_exchange_c SYMBOL_NAME(__atomic_exchange)
#pragma redefine_extname __atomic_compare_exchange_c SYMBOL_NAME(              \
    __atomic_compare_exchange)
#pragma redefine_extname __atomic_is_lock_free_c SYMBOL_NAME(                  \
    __atomic_is_lock_free)

/// Number of locks.  This allocates one page on 32-bit platforms, two on
/// 64-bit.  This can be specified externally if a different trade-off between
/// memory usage and contention probability is required for a given platform.
#ifndef SPINLOCK_COUNT
#define SPINLOCK_COUNT (1 << 10)
#endif
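// For example, a platform with many cores might build with
// -DSPINLOCK_COUNT='(1 << 12)' to trade more memory for a lower collision
// probability.  Note that SPINLOCK_COUNT must remain a power of two for
// SPINLOCK_MASK below to work.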
static const long SPINLOCK_MASK = SPINLOCK_COUNT - 1;

////////////////////////////////////////////////////////////////////////////////
// Platform-specific lock implementation.  Falls back to spinlocks if none is
// defined.  Each platform should define the Lock type, and corresponding
// lock() and unlock() functions.
////////////////////////////////////////////////////////////////////////////////
#if defined(_LIBATOMIC_USE_PTHREAD)
#include <pthread.h>
typedef pthread_mutex_t Lock;
/// Unlock a lock.  This is a release operation.
__inline static void unlock(Lock *l) { pthread_mutex_unlock(l); }
/// Locks a lock.
__inline static void lock(Lock *l) { pthread_mutex_lock(l); }
/// locks for atomic operations
static Lock locks[SPINLOCK_COUNT];

#elif defined(__FreeBSD__) || defined(__DragonFly__)
#include <errno.h>
// clang-format off
#include <sys/types.h>
#include <machine/atomic.h>
#include <sys/umtx.h>
// clang-format on
typedef struct _usem Lock;
__inline static void unlock(Lock *l) {
  __c11_atomic_store((_Atomic(uint32_t) *)&l->_count, 1, __ATOMIC_RELEASE);
  __c11_atomic_thread_fence(__ATOMIC_SEQ_CST);
  if (l->_has_waiters)
    _umtx_op(l, UMTX_OP_SEM_WAKE, 1, 0, 0);
}
__inline static void lock(Lock *l) {
  uint32_t old = 1;
  while (!__c11_atomic_compare_exchange_weak((_Atomic(uint32_t) *)&l->_count,
                                             &old, 0, __ATOMIC_ACQUIRE,
                                             __ATOMIC_RELAXED)) {
    _umtx_op(l, UMTX_OP_SEM_WAIT, 0, 0, 0);
    old = 1;
  }
}
/// locks for atomic operations
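/// Each initializer element below is a struct _usem; assuming the field
/// order {_has_waiters, _count, _flags}, {0, 1, 0} means "no waiters,
/// count 1 (unlocked), no flags".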
static Lock locks[SPINLOCK_COUNT] = {[0 ... SPINLOCK_COUNT - 1] = {0, 1, 0}};

#elif defined(__APPLE__)
#include <libkern/OSAtomic.h>
typedef OSSpinLock Lock;
__inline static void unlock(Lock *l) { OSSpinLockUnlock(l); }
/// Locks a lock.  In the current implementation, this is potentially
/// unbounded in the contended case.
__inline static void lock(Lock *l) { OSSpinLockLock(l); }
static Lock locks[SPINLOCK_COUNT]; // initialized to OS_SPINLOCK_INIT which is 0

#else
_Static_assert(__atomic_always_lock_free(sizeof(uintptr_t), 0),
               "Implementation assumes lock-free pointer-size cmpxchg");
typedef _Atomic(uintptr_t) Lock;
/// Unlock a lock.  This is a release operation.
__inline static void unlock(Lock *l) {
  __c11_atomic_store(l, 0, __ATOMIC_RELEASE);
}
/// Locks a lock.  In the current implementation, this is potentially
/// unbounded in the contended case.
__inline static void lock(Lock *l) {
  uintptr_t old = 0;
  while (!__c11_atomic_compare_exchange_weak(l, &old, 1, __ATOMIC_ACQUIRE,
                                             __ATOMIC_RELAXED))
    old = 0;
}
/// locks for atomic operations
static Lock locks[SPINLOCK_COUNT];
#endif

/// Returns a lock to use for a given pointer.
static __inline Lock *lock_for_pointer(void *ptr) {
  intptr_t hash = (intptr_t)ptr;
  // Disregard the lowest 4 bits.  We want all values that may be part of the
  // same memory operation to hash to the same value and therefore use the same
  // lock.
  hash >>= 4;
  // Use the next bits as the basis for the hash.
  intptr_t low = hash & SPINLOCK_MASK;
  // Now use the high(er) set of bits to perturb the hash, so that we don't
  // get collisions from atomic fields in a single object.
  hash >>= 16;
  hash ^= low;
  // Return a pointer to the word to use.
  return locks + (hash & SPINLOCK_MASK);
}
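
// A worked example (illustrative pointer value only): with the default
// SPINLOCK_MASK of 0x3ff and ptr == (void *)0x1234560,
//   hash = 0x1234560 >> 4    = 0x123456
//   low  = 0x123456 & 0x3ff  = 0x056
//   hash = 0x123456 >> 16    = 0x12
//   hash = 0x12 ^ 0x056      = 0x044
// so the operation uses locks[0x44].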

/// Macros for determining whether a size is lock free.
#define ATOMIC_ALWAYS_LOCK_FREE_OR_ALIGNED_LOCK_FREE(size, p)                  \
  (__atomic_always_lock_free(size, p) ||                                       \
   (__atomic_always_lock_free(size, 0) && ((uintptr_t)p % size) == 0))
#define IS_LOCK_FREE_1(p) ATOMIC_ALWAYS_LOCK_FREE_OR_ALIGNED_LOCK_FREE(1, p)
#define IS_LOCK_FREE_2(p) ATOMIC_ALWAYS_LOCK_FREE_OR_ALIGNED_LOCK_FREE(2, p)
#define IS_LOCK_FREE_4(p) ATOMIC_ALWAYS_LOCK_FREE_OR_ALIGNED_LOCK_FREE(4, p)
#define IS_LOCK_FREE_8(p) ATOMIC_ALWAYS_LOCK_FREE_OR_ALIGNED_LOCK_FREE(8, p)
#define IS_LOCK_FREE_16(p) ATOMIC_ALWAYS_LOCK_FREE_OR_ALIGNED_LOCK_FREE(16, p)
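
// Reading the macro above: the first clause asks whether an access of this
// size is always lock-free given what the compiler can deduce about p's
// alignment; the second asks whether it is always lock-free at natural
// alignment and then checks at run time that p is in fact size-aligned.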

/// Macro that calls the compiler-generated lock-free versions of functions
/// when they exist.
#define TRY_LOCK_FREE_CASE(n, type, ptr)                                       \
  case n:                                                                      \
    if (IS_LOCK_FREE_##n(ptr)) {                                               \
      LOCK_FREE_ACTION(type);                                                  \
    }                                                                          \
    break;
#ifdef __SIZEOF_INT128__
#define TRY_LOCK_FREE_CASE_16(p) TRY_LOCK_FREE_CASE(16, __uint128_t, p)
#else
#define TRY_LOCK_FREE_CASE_16(p) /* __uint128_t not available */
#endif

#define LOCK_FREE_CASES(ptr)                                                   \
  do {                                                                         \
    switch (size) {                                                            \
      TRY_LOCK_FREE_CASE(1, uint8_t, ptr)                                      \
      TRY_LOCK_FREE_CASE(2, uint16_t, ptr)                                     \
      TRY_LOCK_FREE_CASE(4, uint32_t, ptr)                                     \
      TRY_LOCK_FREE_CASE(8, uint64_t, ptr)                                     \
      TRY_LOCK_FREE_CASE_16(ptr) /* __uint128_t may not be supported */        \
    default:                                                                   \
      break;                                                                   \
    }                                                                          \
  } while (0)

/// Whether atomic operations for the given size (and alignment) are lock-free.
bool __atomic_is_lock_free_c(size_t size, void *ptr) {
#define LOCK_FREE_ACTION(type) return true;
  LOCK_FREE_CASES(ptr);
#undef LOCK_FREE_ACTION
  return false;
}
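
// Caller-side illustration (a hypothetical sketch): C11's atomic_is_lock_free
// is lowered to a call to this function when the compiler cannot answer the
// question statically, e.g.
//
//   _Atomic Big obj;                        // Big as in the sketch above
//   bool lf = atomic_is_lock_free(&obj);
//   // lowers to: __atomic_is_lock_free(sizeof(Big), &obj)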

/// An atomic load operation.  This is atomic with respect to the source
/// pointer only.
void __atomic_load_c(int size, void *src, void *dest, int model) {
#define LOCK_FREE_ACTION(type)                                                 \
  *((type *)dest) = __c11_atomic_load((_Atomic(type) *)src, model);            \
  return;
  LOCK_FREE_CASES(src);
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(src);
  lock(l);
  memcpy(dest, src, size);
  unlock(l);
}
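///
/// The pattern here (and in the functions below): LOCK_FREE_CASES returns
/// early through LOCK_FREE_ACTION when the size and alignment permit a
/// lock-free primitive; otherwise control falls through to the lock-protected
/// memcpy path.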

/// An atomic store operation.  This is atomic with respect to the destination
/// pointer only.
void __atomic_store_c(int size, void *dest, void *src, int model) {
#define LOCK_FREE_ACTION(type)                                                 \
  __c11_atomic_store((_Atomic(type) *)dest, *(type *)src, model);              \
  return;
  LOCK_FREE_CASES(dest);
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(dest);
  lock(l);
  memcpy(dest, src, size);
  unlock(l);
}

/// Atomic compare and exchange operation.  If the value at *ptr is identical
/// to the value at *expected, then this copies the value at *desired to *ptr.
/// If they are not identical, then this stores the current value from *ptr in
/// *expected.
///
/// This function returns 1 if the exchange takes place or 0 if it fails.
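///
/// Note that in the locked fallback path the success and failure memory-order
/// arguments are not consulted; the lock's acquire/release ordering is used
/// instead.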
int __atomic_compare_exchange_c(int size, void *ptr, void *expected,
                                void *desired, int success, int failure) {
#define LOCK_FREE_ACTION(type)                                                 \
  return __c11_atomic_compare_exchange_strong(                                 \
      (_Atomic(type) *)ptr, (type *)expected, *(type *)desired, success,       \
      failure)
  LOCK_FREE_CASES(ptr);
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(ptr);
  lock(l);
  if (memcmp(ptr, expected, size) == 0) {
    memcpy(ptr, desired, size);
    unlock(l);
    return 1;
  }
  memcpy(expected, ptr, size);
  unlock(l);
  return 0;
}

/// Performs an atomic exchange operation between two pointers.  This is atomic
/// with respect to the target address.
void __atomic_exchange_c(int size, void *ptr, void *val, void *old, int model) {
#define LOCK_FREE_ACTION(type)                                                 \
  *(type *)old =                                                               \
      __c11_atomic_exchange((_Atomic(type) *)ptr, *(type *)val, model);        \
  return;
  LOCK_FREE_CASES(ptr);
#undef LOCK_FREE_ACTION
  Lock *l = lock_for_pointer(ptr);
  lock(l);
  memcpy(old, ptr, size);
  memcpy(ptr, val, size);
  unlock(l);
}

////////////////////////////////////////////////////////////////////////////////
// Where the size is known at compile time, the compiler may emit calls to
// specialised versions of the above functions.
////////////////////////////////////////////////////////////////////////////////
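// For instance (an illustrative sketch): on a target where 8-byte atomics are
// not inlined, a load such as
//
//   uint64_t v = atomic_load_explicit(&x, memory_order_acquire);
//
// may be emitted as a call to __atomic_load_8(&x, __ATOMIC_ACQUIRE), which
// the OPTIMISED_CASE expansions below define.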
#ifdef __SIZEOF_INT128__
#define OPTIMISED_CASES                                                        \
  OPTIMISED_CASE(1, IS_LOCK_FREE_1, uint8_t)                                   \
  OPTIMISED_CASE(2, IS_LOCK_FREE_2, uint16_t)                                  \
  OPTIMISED_CASE(4, IS_LOCK_FREE_4, uint32_t)                                  \
  OPTIMISED_CASE(8, IS_LOCK_FREE_8, uint64_t)                                  \
  OPTIMISED_CASE(16, IS_LOCK_FREE_16, __uint128_t)
#else
#define OPTIMISED_CASES                                                        \
  OPTIMISED_CASE(1, IS_LOCK_FREE_1, uint8_t)                                   \
  OPTIMISED_CASE(2, IS_LOCK_FREE_2, uint16_t)                                  \
  OPTIMISED_CASE(4, IS_LOCK_FREE_4, uint32_t)                                  \
  OPTIMISED_CASE(8, IS_LOCK_FREE_8, uint64_t)
#endif

#define OPTIMISED_CASE(n, lockfree, type)                                      \
  type __atomic_load_##n(type *src, int model) {                               \
    if (lockfree(src))                                                         \
      return __c11_atomic_load((_Atomic(type) *)src, model);                   \
    Lock *l = lock_for_pointer(src);                                           \
    lock(l);                                                                   \
    type val = *src;                                                           \
    unlock(l);                                                                 \
    return val;                                                                \
  }
OPTIMISED_CASES
#undef OPTIMISED_CASE

#define OPTIMISED_CASE(n, lockfree, type)                                      \
  void __atomic_store_##n(type *dest, type val, int model) {                   \
    if (lockfree(dest)) {                                                      \
      __c11_atomic_store((_Atomic(type) *)dest, val, model);                   \
      return;                                                                  \
    }                                                                          \
    Lock *l = lock_for_pointer(dest);                                          \
    lock(l);                                                                   \
    *dest = val;                                                               \
    unlock(l);                                                                 \
    return;                                                                    \
  }
OPTIMISED_CASES
#undef OPTIMISED_CASE

#define OPTIMISED_CASE(n, lockfree, type)                                      \
  type __atomic_exchange_##n(type *dest, type val, int model) {                \
    if (lockfree(dest))                                                        \
      return __c11_atomic_exchange((_Atomic(type) *)dest, val, model);         \
    Lock *l = lock_for_pointer(dest);                                          \
    lock(l);                                                                   \
    type tmp = *dest;                                                          \
    *dest = val;                                                               \
    unlock(l);                                                                 \
    return tmp;                                                                \
  }
OPTIMISED_CASES
#undef OPTIMISED_CASE

#define OPTIMISED_CASE(n, lockfree, type)                                      \
  bool __atomic_compare_exchange_##n(type *ptr, type *expected, type desired,  \
                                     int success, int failure) {               \
    if (lockfree(ptr))                                                         \
      return __c11_atomic_compare_exchange_strong(                             \
          (_Atomic(type) *)ptr, expected, desired, success, failure);          \
    Lock *l = lock_for_pointer(ptr);                                           \
    lock(l);                                                                   \
    if (*ptr == *expected) {                                                   \
      *ptr = desired;                                                          \
      unlock(l);                                                               \
      return true;                                                             \
    }                                                                          \
    *expected = *ptr;                                                          \
    unlock(l);                                                                 \
    return false;                                                              \
  }
OPTIMISED_CASES
#undef OPTIMISED_CASE

////////////////////////////////////////////////////////////////////////////////
// Atomic read-modify-write operations for integers of various sizes.
////////////////////////////////////////////////////////////////////////////////
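// Expanding, e.g., OPTIMISED_CASE(4, IS_LOCK_FREE_4, uint32_t) with
// ATOMIC_RMW(..., add, +) below yields __atomic_fetch_add_4, which returns
// the old value and stores old + val.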
#define ATOMIC_RMW(n, lockfree, type, opname, op)                              \
  type __atomic_fetch_##opname##_##n(type *ptr, type val, int model) {         \
    if (lockfree(ptr))                                                         \
      return __c11_atomic_fetch_##opname((_Atomic(type) *)ptr, val, model);    \
    Lock *l = lock_for_pointer(ptr);                                           \
    lock(l);                                                                   \
    type tmp = *ptr;                                                           \
    *ptr = tmp op val;                                                         \
    unlock(l);                                                                 \
    return tmp;                                                                \
  }

#define ATOMIC_RMW_NAND(n, lockfree, type)                                     \
  type __atomic_fetch_nand_##n(type *ptr, type val, int model) {               \
    if (lockfree(ptr))                                                         \
      return __c11_atomic_fetch_nand((_Atomic(type) *)ptr, val, model);        \
    Lock *l = lock_for_pointer(ptr);                                           \
    lock(l);                                                                   \
    type tmp = *ptr;                                                           \
    *ptr = ~(tmp & val);                                                       \
    unlock(l);                                                                 \
    return tmp;                                                                \
  }

#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, add, +)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, sub, -)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, and, &)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, or, |)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW(n, lockfree, type, xor, ^)
OPTIMISED_CASES
#undef OPTIMISED_CASE
// Allow building with Clang versions that lack the __c11_atomic_fetch_nand
// builtin (pre-14).
#if __has_builtin(__c11_atomic_fetch_nand)
#define OPTIMISED_CASE(n, lockfree, type) ATOMIC_RMW_NAND(n, lockfree, type)
OPTIMISED_CASES
#undef OPTIMISED_CASE
#endif