//===-- tsan_interface_atomic.cpp -----------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//

// ThreadSanitizer atomic operations are based on the C++11/C11 standards.
// For background, see the C++11 standard. A slightly older, publicly
// available draft of the standard (not entirely up-to-date, but close enough
// for casual browsing) is available here:
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3242.pdf
// The following page contains more background information:
// http://www.hpl.hp.com/personal/Hans_Boehm/c++mm/
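//
// For example (illustrative sketch): when building with -fsanitize=thread,
// the compiler typically lowers
//   std::atomic<int> x; ... x.load(std::memory_order_acquire);
// to a call to __tsan_atomic32_load(&x, mo_acquire), defined below.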

#include "sanitizer_common/sanitizer_placement_new.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_mutex.h"
#include "tsan_flags.h"
#include "tsan_interface.h"
#include "tsan_rtl.h"

using namespace __tsan;

#if !SANITIZER_GO && __TSAN_HAS_INT128
// Protects emulation of 128-bit atomic operations.
static StaticSpinMutex mutex128;
#endif

#if SANITIZER_DEBUG
static bool IsLoadOrder(morder mo) {
  return mo == mo_relaxed || mo == mo_consume
      || mo == mo_acquire || mo == mo_seq_cst;
}

static bool IsStoreOrder(morder mo) {
  return mo == mo_relaxed || mo == mo_release || mo == mo_seq_cst;
}
#endif

static bool IsReleaseOrder(morder mo) {
  return mo == mo_release || mo == mo_acq_rel || mo == mo_seq_cst;
}

static bool IsAcquireOrder(morder mo) {
  return mo == mo_consume || mo == mo_acquire
      || mo == mo_acq_rel || mo == mo_seq_cst;
}

static bool IsAcqRelOrder(morder mo) {
  return mo == mo_acq_rel || mo == mo_seq_cst;
}

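// The func_* helpers below perform the actual atomic operation via the
// __sync_* builtins; they are used both directly by the NoTsanAtomic*
// wrappers and as the F parameter of AtomicRMW further down.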
template<typename T> T func_xchg(volatile T *v, T op) {
  T res = __sync_lock_test_and_set(v, op);
  // __sync_lock_test_and_set is only an acquire barrier, not a full one.
  __sync_synchronize();
  return res;
}

template<typename T> T func_add(volatile T *v, T op) {
  return __sync_fetch_and_add(v, op);
}

template<typename T> T func_sub(volatile T *v, T op) {
  return __sync_fetch_and_sub(v, op);
}

template<typename T> T func_and(volatile T *v, T op) {
  return __sync_fetch_and_and(v, op);
}

template<typename T> T func_or(volatile T *v, T op) {
  return __sync_fetch_and_or(v, op);
}

template<typename T> T func_xor(volatile T *v, T op) {
  return __sync_fetch_and_xor(v, op);
}

template<typename T> T func_nand(volatile T *v, T op) {
  // clang does not support __sync_fetch_and_nand.
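  // Emulate it with a CAS loop: try to install ~(cmp & op); on failure,
  // refresh cmp with the observed value and retry.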
  T cmp = *v;
  for (;;) {
    T newv = ~(cmp & op);
    T cur = __sync_val_compare_and_swap(v, cmp, newv);
    if (cmp == cur)
      return cmp;
    cmp = cur;
  }
}

template<typename T> T func_cas(volatile T *v, T cmp, T xch) {
  return __sync_val_compare_and_swap(v, cmp, xch);
}

// Clang does not support 128-bit atomic ops.
// Atomic ops are executed under TSan's internal mutex;
// here we assume that the atomic variables are not accessed
// from non-instrumented code.
#if !defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16) && !SANITIZER_GO \
    && __TSAN_HAS_INT128
a128 func_xchg(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = op;
  return cmp;
}

a128 func_add(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp + op;
  return cmp;
}

a128 func_sub(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp - op;
  return cmp;
}

a128 func_and(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp & op;
  return cmp;
}

a128 func_or(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp | op;
  return cmp;
}

a128 func_xor(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp ^ op;
  return cmp;
}

a128 func_nand(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = ~(cmp & op);
  return cmp;
}

a128 func_cas(volatile a128 *v, a128 cmp, a128 xch) {
  SpinMutexLock lock(&mutex128);
  a128 cur = *v;
  if (cur == cmp)
    *v = xch;
  return cur;
}
#endif

template <typename T>
static int AccessSize() {
  if (sizeof(T) <= 1)
    return 1;
  else if (sizeof(T) <= 2)
    return 2;
  else if (sizeof(T) <= 4)
    return 4;
  else
    return 8;
  // For 16-byte atomics we also use 8-byte memory accesses;
  // this leads to false negatives only in very obscure cases.
}

#if !SANITIZER_GO
static atomic_uint8_t *to_atomic(const volatile a8 *a) {
  return reinterpret_cast<atomic_uint8_t *>(const_cast<a8 *>(a));
}

static atomic_uint16_t *to_atomic(const volatile a16 *a) {
  return reinterpret_cast<atomic_uint16_t *>(const_cast<a16 *>(a));
}
#endif

static atomic_uint32_t *to_atomic(const volatile a32 *a) {
  return reinterpret_cast<atomic_uint32_t *>(const_cast<a32 *>(a));
}

static atomic_uint64_t *to_atomic(const volatile a64 *a) {
  return reinterpret_cast<atomic_uint64_t *>(const_cast<a64 *>(a));
}

static memory_order to_mo(morder mo) {
  switch (mo) {
  case mo_relaxed: return memory_order_relaxed;
  case mo_consume: return memory_order_consume;
  case mo_acquire: return memory_order_acquire;
  case mo_release: return memory_order_release;
  case mo_acq_rel: return memory_order_acq_rel;
  case mo_seq_cst: return memory_order_seq_cst;
  }
  DCHECK(0);
  return memory_order_seq_cst;
}

template<typename T>
static T NoTsanAtomicLoad(const volatile T *a, morder mo) {
  return atomic_load(to_atomic(a), to_mo(mo));
}

#if __TSAN_HAS_INT128 && !SANITIZER_GO
static a128 NoTsanAtomicLoad(const volatile a128 *a, morder mo) {
  SpinMutexLock lock(&mutex128);
  return *a;
}
#endif

template <typename T>
static T AtomicLoad(ThreadState *thr, uptr pc, const volatile T *a, morder mo) {
  DCHECK(IsLoadOrder(mo));
  // This fast-path is critical for performance.
  // Assume the access is atomic.
  if (!IsAcquireOrder(mo)) {
    MemoryAccess(thr, pc, (uptr)a, AccessSize<T>(),
                 kAccessRead | kAccessAtomic);
    return NoTsanAtomicLoad(a, mo);
  }
  // Don't create sync object if it does not exist yet. For example, an atomic
  // pointer is initialized to nullptr and then periodically acquire-loaded.
  T v = NoTsanAtomicLoad(a, mo);
  SyncVar *s = ctx->metamap.GetSyncIfExists((uptr)a);
  if (s) {
    SlotLocker locker(thr);
    ReadLock lock(&s->mtx);
    thr->clock.Acquire(s->clock);
    // Re-read under sync mutex because we need a consistent snapshot
    // of the value and the clock we acquire.
    v = NoTsanAtomicLoad(a, mo);
  }
  MemoryAccess(thr, pc, (uptr)a, AccessSize<T>(), kAccessRead | kAccessAtomic);
  return v;
}

template<typename T>
static void NoTsanAtomicStore(volatile T *a, T v, morder mo) {
  atomic_store(to_atomic(a), v, to_mo(mo));
}

#if __TSAN_HAS_INT128 && !SANITIZER_GO
static void NoTsanAtomicStore(volatile a128 *a, a128 v, morder mo) {
  SpinMutexLock lock(&mutex128);
  *a = v;
}
#endif

template <typename T>
static void AtomicStore(ThreadState *thr, uptr pc, volatile T *a, T v,
                        morder mo) {
  DCHECK(IsStoreOrder(mo));
  MemoryAccess(thr, pc, (uptr)a, AccessSize<T>(), kAccessWrite | kAccessAtomic);
  // This fast-path is critical for performance.
  // Assume the access is atomic.
  // Strictly speaking, even a relaxed store cuts off the release sequence,
  // so it must reset the clock.
  if (!IsReleaseOrder(mo)) {
    NoTsanAtomicStore(a, v, mo);
    return;
  }
  SlotLocker locker(thr);
  {
    auto s = ctx->metamap.GetSyncOrCreate(thr, pc, (uptr)a, false);
    Lock lock(&s->mtx);
    thr->clock.ReleaseStore(&s->clock);
    NoTsanAtomicStore(a, v, mo);
  }
  IncrementEpoch(thr);
}

template <typename T, T (*F)(volatile T *v, T op)>
static T AtomicRMW(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
  MemoryAccess(thr, pc, (uptr)a, AccessSize<T>(), kAccessWrite | kAccessAtomic);
  if (LIKELY(mo == mo_relaxed))
    return F(a, v);
  SlotLocker locker(thr);
  {
    auto s = ctx->metamap.GetSyncOrCreate(thr, pc, (uptr)a, false);
    RWLock lock(&s->mtx, IsReleaseOrder(mo));
    if (IsAcqRelOrder(mo))
      thr->clock.ReleaseAcquire(&s->clock);
    else if (IsReleaseOrder(mo))
      thr->clock.Release(&s->clock);
    else if (IsAcquireOrder(mo))
      thr->clock.Acquire(s->clock);
    v = F(a, v);
  }
  if (IsReleaseOrder(mo))
    IncrementEpoch(thr);
  return v;
}

template<typename T>
static T NoTsanAtomicExchange(volatile T *a, T v, morder mo) {
  return func_xchg(a, v);
}

template<typename T>
static T NoTsanAtomicFetchAdd(volatile T *a, T v, morder mo) {
  return func_add(a, v);
}

template<typename T>
static T NoTsanAtomicFetchSub(volatile T *a, T v, morder mo) {
  return func_sub(a, v);
}

template<typename T>
static T NoTsanAtomicFetchAnd(volatile T *a, T v, morder mo) {
  return func_and(a, v);
}

template<typename T>
static T NoTsanAtomicFetchOr(volatile T *a, T v, morder mo) {
  return func_or(a, v);
}

template<typename T>
static T NoTsanAtomicFetchXor(volatile T *a, T v, morder mo) {
  return func_xor(a, v);
}

template<typename T>
static T NoTsanAtomicFetchNand(volatile T *a, T v, morder mo) {
  return func_nand(a, v);
}

template<typename T>
static T AtomicExchange(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_xchg>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchAdd(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_add>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchSub(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_sub>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchAnd(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_and>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchOr(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_or>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchXor(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_xor>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchNand(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_nand>(thr, pc, a, v, mo);
}

template<typename T>
static bool NoTsanAtomicCAS(volatile T *a, T *c, T v, morder mo, morder fmo) {
  return atomic_compare_exchange_strong(to_atomic(a), c, v, to_mo(mo));
}

#if __TSAN_HAS_INT128
static bool NoTsanAtomicCAS(volatile a128 *a, a128 *c, a128 v,
    morder mo, morder fmo) {
  a128 old = *c;
  a128 cur = func_cas(a, old, v);
  if (cur == old)
    return true;
  *c = cur;
  return false;
}
#endif

template<typename T>
static T NoTsanAtomicCAS(volatile T *a, T c, T v, morder mo, morder fmo) {
  NoTsanAtomicCAS(a, &c, v, mo, fmo);
  return c;
}

template <typename T>
static bool AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T *c, T v,
                      morder mo, morder fmo) {
  // 31.7.2.18: "The failure argument shall not be memory_order_release
  // nor memory_order_acq_rel". LLVM (as of 2021-05) falls back to Monotonic
  // (mo_relaxed) when those are used.
  DCHECK(IsLoadOrder(fmo));

  MemoryAccess(thr, pc, (uptr)a, AccessSize<T>(), kAccessWrite | kAccessAtomic);
  if (LIKELY(mo == mo_relaxed && fmo == mo_relaxed)) {
    T cc = *c;
    T pr = func_cas(a, cc, v);
    if (pr == cc)
      return true;
    *c = pr;
    return false;
  }
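  // Slow path: synchronize through the SyncVar associated with the address.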
  SlotLocker locker(thr);
  bool release = IsReleaseOrder(mo);
  bool success;
  {
    auto s = ctx->metamap.GetSyncOrCreate(thr, pc, (uptr)a, false);
    RWLock lock(&s->mtx, release);
    T cc = *c;
    T pr = func_cas(a, cc, v);
    success = pr == cc;
    if (!success) {
      *c = pr;
      mo = fmo;
    }
    if (success && IsAcqRelOrder(mo))
      thr->clock.ReleaseAcquire(&s->clock);
    else if (success && IsReleaseOrder(mo))
      thr->clock.Release(&s->clock);
    else if (IsAcquireOrder(mo))
      thr->clock.Acquire(s->clock);
  }
  if (success && release)
    IncrementEpoch(thr);
  return success;
}

template<typename T>
static T AtomicCAS(ThreadState *thr, uptr pc,
    volatile T *a, T c, T v, morder mo, morder fmo) {
  AtomicCAS(thr, pc, a, &c, v, mo, fmo);
  return c;
}

#if !SANITIZER_GO
static void NoTsanAtomicFence(morder mo) {
  __sync_synchronize();
}

static void AtomicFence(ThreadState *thr, uptr pc, morder mo) {
  // FIXME(dvyukov): not implemented.
  __sync_synchronize();
}
#endif

// Interface functions follow.
#if !SANITIZER_GO

// C/C++

static morder convert_morder(morder mo) {
  if (flags()->force_seq_cst_atomics)
    return (morder)mo_seq_cst;

  // Filter out additional memory order flags:
  // MEMMODEL_SYNC        = 1 << 15
  // __ATOMIC_HLE_ACQUIRE = 1 << 16
  // __ATOMIC_HLE_RELEASE = 1 << 17
  //
  // HLE is an optimization, and we pretend that elision always fails.
  // MEMMODEL_SYNC is used when lowering __sync_ atomics; since we use
  // __sync_ atomics for the actual atomic operations, we can safely ignore
  // it as well. It also subtly affects semantics, but we don't model the
  // difference.
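  //
  // For example (assuming the standard __ATOMIC_ACQUIRE == 2), an HLE-flagged
  // order such as (__ATOMIC_HLE_ACQUIRE | __ATOMIC_ACQUIRE) == 0x10002 is
  // masked down to plain mo_acquire here.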
  return (morder)(mo & 0x7fff);
}

#  define ATOMIC_IMPL(func, ...)                                \
    ThreadState *const thr = cur_thread();                      \
    ProcessPendingSignals(thr);                                 \
    if (UNLIKELY(thr->ignore_sync || thr->ignore_interceptors)) \
      return NoTsanAtomic##func(__VA_ARGS__);                   \
    mo = convert_morder(mo);                                    \
    return Atomic##func(thr, GET_CALLER_PC(), __VA_ARGS__);
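// E.g. ATOMIC_IMPL(Load, a, mo) either takes the uninstrumented path
// (NoTsanAtomicLoad(a, mo)) when synchronization is ignored, or calls
// AtomicLoad(thr, GET_CALLER_PC(), a, mo) with the converted order.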

extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_load(const volatile a8 *a, morder mo) {
  ATOMIC_IMPL(Load, a, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_load(const volatile a16 *a, morder mo) {
  ATOMIC_IMPL(Load, a, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_load(const volatile a32 *a, morder mo) {
  ATOMIC_IMPL(Load, a, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_load(const volatile a64 *a, morder mo) {
  ATOMIC_IMPL(Load, a, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_load(const volatile a128 *a, morder mo) {
  ATOMIC_IMPL(Load, a, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic8_store(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(Store, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic16_store(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(Store, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic32_store(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(Store, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic64_store(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(Store, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic128_store(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(Store, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_exchange(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(Exchange, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_exchange(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(Exchange, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_exchange(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(Exchange, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_exchange(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(Exchange, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_exchange(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(Exchange, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_add(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(FetchAdd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_add(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(FetchAdd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_add(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(FetchAdd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_add(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(FetchAdd, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_add(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(FetchAdd, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_sub(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(FetchSub, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_sub(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(FetchSub, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_sub(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(FetchSub, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_sub(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(FetchSub, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_sub(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(FetchSub, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_and(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(FetchAnd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_and(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(FetchAnd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_and(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(FetchAnd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_and(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(FetchAnd, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_and(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(FetchAnd, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_or(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(FetchOr, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_or(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(FetchOr, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_or(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(FetchOr, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_or(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(FetchOr, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_or(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(FetchOr, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_xor(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(FetchXor, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_xor(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(FetchXor, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_xor(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(FetchXor, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_xor(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(FetchXor, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_xor(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(FetchXor, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_nand(volatile a8 *a, a8 v, morder mo) {
  ATOMIC_IMPL(FetchNand, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_nand(volatile a16 *a, a16 v, morder mo) {
  ATOMIC_IMPL(FetchNand, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_nand(volatile a32 *a, a32 v, morder mo) {
  ATOMIC_IMPL(FetchNand, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_nand(volatile a64 *a, a64 v, morder mo) {
  ATOMIC_IMPL(FetchNand, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_nand(volatile a128 *a, a128 v, morder mo) {
  ATOMIC_IMPL(FetchNand, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic8_compare_exchange_strong(volatile a8 *a, a8 *c, a8 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic16_compare_exchange_strong(volatile a16 *a, a16 *c, a16 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic32_compare_exchange_strong(volatile a32 *a, a32 *c, a32 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic64_compare_exchange_strong(volatile a64 *a, a64 *c, a64 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic128_compare_exchange_strong(volatile a128 *a, a128 *c, a128 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic8_compare_exchange_weak(volatile a8 *a, a8 *c, a8 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic16_compare_exchange_weak(volatile a16 *a, a16 *c, a16 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic32_compare_exchange_weak(volatile a32 *a, a32 *c, a32 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic64_compare_exchange_weak(volatile a64 *a, a64 *c, a64 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic128_compare_exchange_weak(volatile a128 *a, a128 *c, a128 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_compare_exchange_val(volatile a8 *a, a8 c, a8 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_compare_exchange_val(volatile a16 *a, a16 c, a16 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_compare_exchange_val(volatile a32 *a, a32 c, a32 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_compare_exchange_val(volatile a64 *a, a64 c, a64 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_compare_exchange_val(volatile a128 *a, a128 c, a128 v,
    morder mo, morder fmo) {
  ATOMIC_IMPL(CAS, a, c, v, mo, fmo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic_thread_fence(morder mo) { ATOMIC_IMPL(Fence, mo); }

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic_signal_fence(morder mo) {
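  // Nothing to do: a signal fence constrains only compiler reordering
  // within a single thread, which the race detector does not model.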
}
}  // extern "C"

#else  // #if !SANITIZER_GO

// Go

#  define ATOMIC(func, ...)               \
    if (thr->ignore_sync) {               \
      NoTsanAtomic##func(__VA_ARGS__);    \
    } else {                              \
      FuncEntry(thr, cpc);                \
      Atomic##func(thr, pc, __VA_ARGS__); \
      FuncExit(thr);                      \
    }

#  define ATOMIC_RET(func, ret, ...)              \
    if (thr->ignore_sync) {                       \
      (ret) = NoTsanAtomic##func(__VA_ARGS__);    \
    } else {                                      \
      FuncEntry(thr, cpc);                        \
      (ret) = Atomic##func(thr, pc, __VA_ARGS__); \
      FuncExit(thr);                              \
    }

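// The Go runtime passes all arguments in a single buffer `a`: the first word
// holds the address of the atomic variable, followed by the operand(s) and
// the slot(s) that receive the result. For example, in
// __tsan_go_atomic32_fetch_add below, *(a32 **)a is the address,
// *(a32 *)(a + 8) the addend, and *(a32 *)(a + 16) receives the old value.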
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Load, *(a32*)(a+8), *(a32**)a, mo_acquire);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Load, *(a64*)(a+8), *(a64**)a, mo_acquire);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC(Store, *(a32**)a, *(a32*)(a+8), mo_release);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC(Store, *(a64**)a, *(a64*)(a+8), mo_release);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchAdd, *(a32*)(a+16), *(a32**)a, *(a32*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchAdd, *(a64*)(a+16), *(a64**)a, *(a64*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_fetch_and(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchAnd, *(a32 *)(a + 16), *(a32 **)a, *(a32 *)(a + 8),
             mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_fetch_and(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchAnd, *(a64 *)(a + 16), *(a64 **)a, *(a64 *)(a + 8),
             mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_fetch_or(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchOr, *(a32 *)(a + 16), *(a32 **)a, *(a32 *)(a + 8),
             mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_fetch_or(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchOr, *(a64 *)(a + 16), *(a64 **)a, *(a64 *)(a + 8),
             mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Exchange, *(a32*)(a+16), *(a32**)a, *(a32*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Exchange, *(a64*)(a+16), *(a64**)a, *(a64*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_compare_exchange(
    ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  a32 cur = 0;
  a32 cmp = *(a32*)(a+8);
  ATOMIC_RET(CAS, cur, *(a32**)a, cmp, *(a32*)(a+12), mo_acq_rel, mo_acquire);
  *(bool*)(a+16) = (cur == cmp);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_compare_exchange(
    ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  a64 cur = 0;
  a64 cmp = *(a64*)(a+8);
  ATOMIC_RET(CAS, cur, *(a64**)a, cmp, *(a64*)(a+16), mo_acq_rel, mo_acquire);
  *(bool*)(a+24) = (cur == cmp);
}
}  // extern "C"
#endif  // #if !SANITIZER_GO