//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_MSVC_H
#define SANITIZER_ATOMIC_MSVC_H

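// The intrinsics below are declared by hand (with matching #pragma
// intrinsic) instead of by including <intrin.h>; presumably this keeps the
// sanitizer runtime free of heavy platform headers.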
extern "C" void _ReadWriteBarrier();
#pragma intrinsic(_ReadWriteBarrier)
extern "C" void _mm_mfence();
#pragma intrinsic(_mm_mfence)
extern "C" void _mm_pause();
#pragma intrinsic(_mm_pause)
extern "C" long _InterlockedExchangeAdd(  // NOLINT
    long volatile *Addend, long Value);   // NOLINT
#pragma intrinsic(_InterlockedExchangeAdd)

#ifdef _WIN64
extern "C" void *_InterlockedCompareExchangePointer(
    void *volatile *Destination,
    void *Exchange, void *Comparand);
#pragma intrinsic(_InterlockedCompareExchangePointer)
#else
// There's no _InterlockedCompareExchangePointer intrinsic on x86,
// so call _InterlockedCompareExchange instead.
extern "C"
long __cdecl _InterlockedCompareExchange(  // NOLINT
    long volatile *Destination,            // NOLINT
    long Exchange, long Comparand);        // NOLINT
#pragma intrinsic(_InterlockedCompareExchange)

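// The emulation below is valid only because on 32-bit Windows
// sizeof(void*) == sizeof(long) == 4, so a pointer survives a round trip
// through long.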
inline static void *_InterlockedCompareExchangePointer(
    void *volatile *Destination,
    void *Exchange, void *Comparand) {
  return reinterpret_cast<void*>(
      _InterlockedCompareExchange(
          reinterpret_cast<long volatile*>(Destination),  // NOLINT
          reinterpret_cast<long>(Exchange),               // NOLINT
          reinterpret_cast<long>(Comparand)));            // NOLINT
}
#endif

namespace __sanitizer {

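// Compiler-only barrier: _ReadWriteBarrier() stops the compiler from moving
// memory accesses across it, but emits no machine instruction.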
INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}

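// Full hardware barrier: MFENCE orders all earlier loads and stores before
// all later ones, as a seq_cst fence requires.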
INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}

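// Executes the PAUSE instruction cnt times. Meant for spin-wait loops,
// where PAUSE lowers power consumption and the penalty of leaving the spin
// once the awaited value finally changes.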
INLINE void proc_yield(int cnt) {
  for (int i = 0; i < cnt; i++)
    _mm_pause();
}

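// On x86 an aligned load already has acquire semantics in hardware, so a
// plain load bracketed by compiler fences is sufficient here; seq_cst loads
// also work because seq_cst stores (below) end with a full hardware fence.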
template<typename T>
INLINE typename T::Type atomic_load(
    const volatile T *a, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_consume
      | memory_order_acquire | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  typename T::Type v;
  // FIXME(dvyukov): 64-bit load is not atomic on 32-bits.
  if (mo == memory_order_relaxed) {
    v = a->val_dont_use;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    v = a->val_dont_use;
    atomic_signal_fence(memory_order_seq_cst);
  }
  return v;
}

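// Stores mirror the loads: x86 stores have release semantics natively, so
// compiler fences cover release ordering, and seq_cst additionally issues
// MFENCE to order the store before subsequent loads.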
template<typename T>
INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_release
      | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  // FIXME(dvyukov): 64-bit store is not atomic on 32-bits.
  if (mo == memory_order_relaxed) {
    a->val_dont_use = v;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    a->val_dont_use = v;
    atomic_signal_fence(memory_order_seq_cst);
  }
  if (mo == memory_order_seq_cst)
    atomic_thread_fence(memory_order_seq_cst);
}

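// _InterlockedExchangeAdd compiles to LOCK XADD, a full barrier, so every
// requested memory order is already satisfied and mo can be ignored.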
INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, (long)v);  // NOLINT
}

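// The 8- and 16-bit exchanges fall back to inline asm, presumably because
// the targeted MSVC versions lack _InterlockedExchange8/16. XCHG with a
// memory operand carries an implicit LOCK prefix, so it is a full barrier.
// Note that __asm blocks compile only with the 32-bit MSVC compiler; the
// x64 compiler does not support inline assembly.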
INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
    u8 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cl, v
    xchg [eax], cl  // NOLINT
    mov v, cl
  }
  return v;
}

INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
    u16 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cx, v
    xchg [eax], cx  // NOLINT
    mov v, cx
  }
  return v;
}

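// Strong CAS with the C++11 contract: returns true on success; on failure
// returns false and stores the value actually observed into *cmp.
// _InterlockedCompareExchangePointer is itself a full barrier, so mo is
// not consulted.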
INLINE bool atomic_compare_exchange_strong(volatile atomic_uintptr_t *a,
                                           uptr *cmp,
                                           uptr xchg,
                                           memory_order mo) {
  uptr cmpv = *cmp;
  uptr prev = (uptr)_InterlockedCompareExchangePointer(
      (void*volatile*)&a->val_dont_use, (void*)xchg, (void*)cmpv);
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

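// A weak CAS may fail spuriously, but LOCK CMPXCHG never does, so
// delegating to the strong version is correct and costs nothing.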
template<typename T>
INLINE bool atomic_compare_exchange_weak(volatile T *a,
                                         typename T::Type *cmp,
                                         typename T::Type xchg,
                                         memory_order mo) {
  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
}

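// A hypothetical usage sketch (not part of this header): a test-and-set
// spin lock built from the primitives above, assuming the atomic types
// defined in sanitizer_atomic.h.
//
//   atomic_uintptr_t lock;  // zero-initialized == unlocked
//
//   void Lock() {
//     uptr expected = 0;
//     while (!atomic_compare_exchange_strong(&lock, &expected, 1,
//                                            memory_order_acquire)) {
//       expected = 0;    // CAS failure wrote the observed value here
//       proc_yield(10);  // PAUSE between attempts
//     }
//   }
//
//   void Unlock() {
//     atomic_store(&lock, 0, memory_order_release);
//   }
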
}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_MSVC_H