/netbsd-src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/tests/
sanitizer_atomic_test.cc:
    55  CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_relaxed>();   in TEST()
    56  CheckStoreLoad<atomic_uint8_t, memory_order_consume, memory_order_relaxed>();   in TEST()
    57  CheckStoreLoad<atomic_uint8_t, memory_order_acquire, memory_order_relaxed>();   in TEST()
    58  CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_release>();   in TEST()
    61  CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_relaxed>();  in TEST()
    62  CheckStoreLoad<atomic_uint16_t, memory_order_consume, memory_order_relaxed>();  in TEST()
    63  CheckStoreLoad<atomic_uint16_t, memory_order_acquire, memory_order_relaxed>();  in TEST()
    64  CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_release>();  in TEST()
    67  CheckStoreLoad<atomic_uint32_t, memory_order_relaxed, memory_order_relaxed>();  in TEST()
    68  CheckStoreLoad<atomic_uint32_t, memory_order_consume, memory_order_relaxed>();  in TEST()
    [all …]
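The hits above come from a test that round-trips values through every supported combination of load and store orderings. A minimal sketch of that shape, using standard <atomic> types instead of the sanitizer's own atomic_uintNN_t wrappers (names and the iteration count are illustrative):

#include <atomic>
#include <cassert>
#include <cstdint>

// Store a run of values with StoreMO and read each back with LoadMO,
// checking the value survives the round trip for that ordering pair.
template <class T, std::memory_order LoadMO, std::memory_order StoreMO>
void CheckStoreLoad() {
  std::atomic<T> v{0};
  for (T i = 0; i < 100; ++i) {
    v.store(i, StoreMO);
    assert(v.load(LoadMO) == i);
  }
}

int main() {
  CheckStoreLoad<uint8_t,  std::memory_order_relaxed, std::memory_order_relaxed>();
  CheckStoreLoad<uint8_t,  std::memory_order_acquire, std::memory_order_relaxed>();
  CheckStoreLoad<uint16_t, std::memory_order_relaxed, std::memory_order_release>();
}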
/netbsd-src/external/mpl/bind/dist/lib/isc/include/isc/
atomic.h:
    32  atomic_store_explicit((o), (v), memory_order_relaxed)
    33  #define atomic_load_relaxed(o) atomic_load_explicit((o), memory_order_relaxed)
    35  atomic_fetch_add_explicit((o), (v), memory_order_relaxed)
    37  atomic_fetch_sub_explicit((o), (v), memory_order_relaxed)
    39  atomic_fetch_or_explicit((o), (v), memory_order_relaxed)
    41  atomic_fetch_and_explicit((o), (v), memory_order_relaxed)
    43  atomic_exchange_explicit((o), (v), memory_order_relaxed)
    46  (o), (e), (d), memory_order_relaxed, memory_order_relaxed)
    49  (o), (e), (d), memory_order_relaxed, memory_order_relaxed)
    [all …]
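These macros are thin wrappers that pin the C11 atomic_*_explicit calls to memory_order_relaxed. A rough C++ <atomic> equivalent of the same idea (illustrative names, not BIND's API), with the typical use case of a counter where only the eventual total matters:

#include <atomic>
#include <cstdint>

template <class T> void atomic_store_relaxed(std::atomic<T> &o, T v) {
  o.store(v, std::memory_order_relaxed);
}
template <class T> T atomic_load_relaxed(const std::atomic<T> &o) {
  return o.load(std::memory_order_relaxed);
}
template <class T> T atomic_fetch_add_relaxed(std::atomic<T> &o, T v) {
  return o.fetch_add(v, std::memory_order_relaxed);
}
template <class T> bool atomic_compare_exchange_relaxed(std::atomic<T> &o, T &expected, T desired) {
  return o.compare_exchange_strong(expected, desired,
                                   std::memory_order_relaxed, std::memory_order_relaxed);
}

// Typical use: a statistics counter where no other data is published through it.
std::atomic<uint64_t> queries{0};
void on_query() { atomic_fetch_add_relaxed(queries, uint64_t{1}); }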
/netbsd-src/external/mpl/dhcp/bind/dist/lib/isc/include/isc/ |
atomic.h:
    31  atomic_store_explicit((o), (v), memory_order_relaxed)
    32  #define atomic_load_relaxed(o) atomic_load_explicit((o), memory_order_relaxed)
    34  atomic_fetch_add_explicit((o), (v), memory_order_relaxed)
    36  atomic_fetch_sub_explicit((o), (v), memory_order_relaxed)
    38  atomic_fetch_or_explicit((o), (v), memory_order_relaxed)
    40  atomic_fetch_and_explicit((o), (v), memory_order_relaxed)
    42  atomic_exchange_explicit((o), (v), memory_order_relaxed)
    45  (o), (e), (d), memory_order_relaxed, memory_order_relaxed)
    48  (o), (e), (d), memory_order_relaxed, memory_order_relaxed)
/netbsd-src/external/mpl/dhcp/bind/include/isc/ |
atomic.h:
    33  atomic_store_explicit((o), (v), memory_order_relaxed)
    34  #define atomic_load_relaxed(o) atomic_load_explicit((o), memory_order_relaxed)
    36  atomic_fetch_add_explicit((o), (v), memory_order_relaxed)
    38  atomic_fetch_sub_explicit((o), (v), memory_order_relaxed)
    40  atomic_fetch_or_explicit((o), (v), memory_order_relaxed)
    42  atomic_fetch_and_explicit((o), (v), memory_order_relaxed)
    44  atomic_exchange_explicit((o), (v), memory_order_relaxed)
    47  (o), (e), (d), memory_order_relaxed, memory_order_relaxed)
    50  (o), (e), (d), memory_order_relaxed, memory_order_relaxed)
/netbsd-src/external/apache2/llvm/dist/llvm/include/llvm/ADT/ |
Statistic.h:
    67   unsigned getValue() const { return Value.load(std::memory_order_relaxed); }  in getValue()
    73   Value.store(Val, std::memory_order_relaxed);
    78   Value.fetch_add(1, std::memory_order_relaxed);
    84   return Value.fetch_add(1, std::memory_order_relaxed);
    88   Value.fetch_sub(1, std::memory_order_relaxed);
    94   return Value.fetch_sub(1, std::memory_order_relaxed);
    100  Value.fetch_add(V, std::memory_order_relaxed);
    107  Value.fetch_sub(V, std::memory_order_relaxed);
    112  unsigned PrevMax = Value.load(std::memory_order_relaxed);  in updateMax()
    116  PrevMax, V, std::memory_order_relaxed)) {  in updateMax()
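The getValue/fetch_add/fetch_sub hits are plain relaxed counter updates; the updateMax hits at lines 112 and 116 belong to a relaxed compare-exchange loop that keeps a running maximum. A sketch of that pattern, reconstructed from the shape of the fragments rather than copied from LLVM:

#include <atomic>

std::atomic<unsigned> Value{0};

// Raise Value to V if V is larger; relaxed is enough because only the final
// maximum matters, not the order in which threads observe intermediate values.
void updateMax(unsigned V) {
  unsigned PrevMax = Value.load(std::memory_order_relaxed);
  while (V > PrevMax) {
    // On failure, compare_exchange_weak reloads PrevMax with the current value.
    if (Value.compare_exchange_weak(PrevMax, V, std::memory_order_relaxed))
      break;
  }
}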
/netbsd-src/external/gpl3/gcc.old/dist/libsanitizer/sanitizer_common/ |
sanitizer_addrhashmap.h:
    194  if (atomic_load(&b->add, memory_order_relaxed)) {  in acquire()
    196  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  in acquire()
    199  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    215  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    227  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  in acquire()
    231  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    255  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    270  atomic_store(&b->add, (uptr)add, memory_order_relaxed);  in acquire()
    282  atomic_store(&b->add, (uptr)add1, memory_order_relaxed);  in acquire()
    288  CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);  in acquire()
    [all …]
sanitizer_mutex.h:
    24   atomic_store(&state_, 0, memory_order_relaxed);  in Init()
    42   CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);  in CheckLocked()
    54   if (atomic_load(&state_, memory_order_relaxed) == 0  in LockSlow()
    99   atomic_store(&state_, kUnlocked, memory_order_relaxed);  in RWMutex()
    103  CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);  in ~RWMutex()
    135  CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);  in CheckLocked()
    153  u32 cmp = atomic_load(&state_, memory_order_relaxed);  in LockSlow()
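The Init/CheckLocked/LockSlow hits follow the usual test-and-test-and-set shape: spin on cheap relaxed loads and only attempt the acquiring operation once the lock looks free. A minimal sketch of that idea (not the sanitizer's exact SpinMutex):

#include <atomic>

class SpinMutex {
  std::atomic<unsigned> state_{0};
 public:
  void Lock() {
    // Fast path: try to take the lock with an acquire exchange.
    if (state_.exchange(1, std::memory_order_acquire) == 0)
      return;
    for (;;) {
      // Spin with relaxed loads; no ordering is needed just to see "still busy".
      while (state_.load(std::memory_order_relaxed) != 0) {
      }
      if (state_.exchange(1, std::memory_order_acquire) == 0)
        return;
    }
  }
  void Unlock() { state_.store(0, std::memory_order_release); }
};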
sanitizer_allocator_stats.h:
    33  v += atomic_load(&stats_[i], memory_order_relaxed);  in Add()
    34  atomic_store(&stats_[i], v, memory_order_relaxed);  in Add()
    38  v = atomic_load(&stats_[i], memory_order_relaxed) - v;  in Sub()
    39  atomic_store(&stats_[i], v, memory_order_relaxed);  in Sub()
    43  atomic_store(&stats_[i], v, memory_order_relaxed);  in Set()
    47  return atomic_load(&stats_[i], memory_order_relaxed);  in Get()
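Add/Sub here are a relaxed load followed by a relaxed store rather than an atomic read-modify-write. That is only sound when each counter has a single writer (for example, per-thread allocator caches), and the atomics exist so a reporting thread can read the values without a data race. A small sketch of that single-writer pattern, with illustrative names:

#include <atomic>
#include <cstdint>

struct LocalStats {
  std::atomic<uint64_t> allocated{0};

  // Called only from the owning thread (assumption), so load+store needs no RMW.
  void Add(uint64_t v) {
    uint64_t cur = allocated.load(std::memory_order_relaxed);
    allocated.store(cur + v, std::memory_order_relaxed);
  }

  // Any thread may read; the value can be momentarily stale but is never torn.
  uint64_t Get() const { return allocated.load(std::memory_order_relaxed); }
};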
sanitizer_atomic_clang_x86.h:
    28  DCHECK(mo & (memory_order_relaxed | memory_order_consume  in atomic_load()
    35  if (mo == memory_order_relaxed) {  in atomic_load()
    73  DCHECK(mo & (memory_order_relaxed | memory_order_release  in atomic_store()
    79  if (mo == memory_order_relaxed) {  in atomic_store()
sanitizer_atomic_clang_other.h:
    26  DCHECK(mo & (memory_order_relaxed | memory_order_consume  in atomic_load()
    33  if (mo == memory_order_relaxed) {  in atomic_load()
    63  DCHECK(mo & (memory_order_relaxed | memory_order_release  in atomic_store()
    69  if (mo == memory_order_relaxed) {  in atomic_store()
/netbsd-src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/ |
sanitizer_addrhashmap.h:
    196  if (atomic_load(&b->add, memory_order_relaxed)) {  in acquire()
    198  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  in acquire()
    201  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    217  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    229  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  in acquire()
    233  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    257  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    272  atomic_store(&b->add, (uptr)add, memory_order_relaxed);  in acquire()
    284  atomic_store(&b->add, (uptr)add1, memory_order_relaxed);  in acquire()
    290  CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);  in acquire()
    [all …]
sanitizer_mutex.h:
    26   atomic_store(&state_, 0, memory_order_relaxed);  in Init()
    44   CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);  in CheckLocked()
    56   if (atomic_load(&state_, memory_order_relaxed) == 0  in LockSlow()
    101  atomic_store(&state_, kUnlocked, memory_order_relaxed);  in RWMutex()
    105  CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);  in ~RWMutex()
    137  CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);  in CheckLocked()
    155  u32 cmp = atomic_load(&state_, memory_order_relaxed);  in LockSlow()
sanitizer_allocator_stats.h:
    35  v += atomic_load(&stats_[i], memory_order_relaxed);  in Add()
    36  atomic_store(&stats_[i], v, memory_order_relaxed);  in Add()
    40  v = atomic_load(&stats_[i], memory_order_relaxed) - v;  in Sub()
    41  atomic_store(&stats_[i], v, memory_order_relaxed);  in Sub()
    45  atomic_store(&stats_[i], v, memory_order_relaxed);  in Set()
    49  return atomic_load(&stats_[i], memory_order_relaxed);  in Get()
sanitizer_atomic_clang_x86.h:
    30  DCHECK(mo & (memory_order_relaxed | memory_order_consume  in atomic_load()
    37  if (mo == memory_order_relaxed) {  in atomic_load()
    75  DCHECK(mo & (memory_order_relaxed | memory_order_release  in atomic_store()
    81  if (mo == memory_order_relaxed) {  in atomic_store()
/netbsd-src/external/gpl3/gcc/dist/libsanitizer/sanitizer_common/ |
sanitizer_addrhashmap.h:
    233  if (atomic_load(&b->add, memory_order_relaxed)) {  in acquire()
    235  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  in acquire()
    238  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    254  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    266  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  in acquire()
    270  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    294  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  in acquire()
    309  atomic_store(&b->add, (uptr)add, memory_order_relaxed);  in acquire()
    321  atomic_store(&b->add, (uptr)add1, memory_order_relaxed);  in acquire()
    327  CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);  in acquire()
    [all …]
sanitizer_allocator_stats.h:
    34  v += atomic_load(&stats_[i], memory_order_relaxed);  in Add()
    35  atomic_store(&stats_[i], v, memory_order_relaxed);  in Add()
    39  v = atomic_load(&stats_[i], memory_order_relaxed) - v;  in Sub()
    40  atomic_store(&stats_[i], v, memory_order_relaxed);  in Sub()
    44  atomic_store(&stats_[i], v, memory_order_relaxed);  in Set()
    48  return atomic_load(&stats_[i], memory_order_relaxed);  in Get()
sanitizer_mutex.h:
    26   atomic_store(&state_, 0, memory_order_relaxed);  in Init()
    42   CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);  in CheckLocked()
    184  state = atomic_load(&state_, memory_order_relaxed);  in Lock()
    204  state = atomic_load(&state_, memory_order_relaxed);  in Lock()
    254  state = atomic_load(&state_, memory_order_relaxed);  in ReadLock()
    270  state = atomic_load(&state_, memory_order_relaxed);  in ReadLock()
    301  CHECK(atomic_load(&state_, memory_order_relaxed) & kWriterLock);  in CheckWriteLocked()
    307  CHECK(atomic_load(&state_, memory_order_relaxed) & kReaderLockMask);  in CheckReadLocked()
sanitizer_atomic_clang_other.h:
    27  DCHECK(mo & (memory_order_relaxed | memory_order_consume  in atomic_load()
    34  if (mo == memory_order_relaxed) {  in atomic_load()
    61  DCHECK(mo & (memory_order_relaxed | memory_order_release  in atomic_store()
    67  if (mo == memory_order_relaxed) {  in atomic_store()
sanitizer_atomic_clang_x86.h:
    29  DCHECK(mo & (memory_order_relaxed | memory_order_consume  in atomic_load()
    36  if (mo == memory_order_relaxed) {  in atomic_load()
    74  DCHECK(mo & (memory_order_relaxed | memory_order_release  in atomic_store()
    80  if (mo == memory_order_relaxed) {  in atomic_store()
/netbsd-src/external/apache2/llvm/dist/libcxx/include/ |
barrier:
    135  auto const __old_phase = __phase.load(memory_order_relaxed);
    139  __expected += __expected_adjustment.load(memory_order_relaxed);
    140  __expected_adjustment.store(0, memory_order_relaxed);
    157  __expected_adjustment.fetch_sub(1, memory_order_relaxed);
    199  auto const __old_phase = __phase.load(memory_order_relaxed);
    201  auto const new_expected = __expected.load(memory_order_relaxed);
    204  __arrived.store(new_expected, memory_order_relaxed);
    218  __expected.fetch_sub(1, memory_order_relaxed);
    259  … __phase_arrived_expected.fetch_add((__old & __expected_mask) << 32, memory_order_relaxed);
    276  __phase_arrived_expected.fetch_add(__expected_unit, memory_order_relaxed);
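The last two hits (__phase_arrived_expected.fetch_add) come from a barrier variant that packs phase, arrival count, and expected count into a single 64-bit atomic so one RMW both records an arrival and detects the end of a phase. The following toy barrier illustrates that packing idea under simplifying assumptions (fixed participant count, spin wait, no wait/notify); it is not libcxx's implementation:

#include <atomic>
#include <cstdint>

class PackedBarrier {
  // High 32 bits: phase number.  Low 32 bits: arrivals still outstanding.
  std::atomic<uint64_t> word_;
  const uint32_t expected_;
 public:
  explicit PackedBarrier(uint32_t expected)
      : word_(static_cast<uint64_t>(expected)), expected_(expected) {}

  void arrive_and_wait() {
    const uint64_t old = word_.fetch_sub(1, std::memory_order_acq_rel);
    const uint32_t phase = static_cast<uint32_t>(old >> 32);
    if (static_cast<uint32_t>(old) == 1) {
      // Last arrival: publish the next phase and reset the count in one store.
      word_.store((static_cast<uint64_t>(phase + 1) << 32) | expected_,
                  std::memory_order_release);
    } else {
      // Everyone else spins until the phase field changes.
      while (static_cast<uint32_t>(word_.load(std::memory_order_acquire) >> 32) == phase) {
      }
    }
  }
};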
/netbsd-src/external/gpl3/gcc/dist/libsanitizer/tsan/ |
tsan_external.cpp:
    35   if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;  in GetTagData()
    54   uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);  in TagFromShadowStackFrame()
    64   CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  in ExternalAccess()
    78   uptr new_tag = atomic_fetch_add(&used_tags, 1, memory_order_relaxed);  in __tsan_external_register_tag()
    101  CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  in __tsan_external_assign_tag()
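The __tsan_external_register_tag hit allocates a new tag with a relaxed fetch_add: atomicity alone guarantees every caller gets a distinct value, and no ordering against other memory is needed at the allocation point. The same pattern in isolation (the starting value and names are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<uint64_t> used_tags{1};

// Hand out a unique, monotonically assigned tag; relaxed is sufficient because
// callers rely only on distinctness, not on ordering against other writes.
uint64_t RegisterTag() {
  return used_tags.fetch_add(1, std::memory_order_relaxed);
}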
/netbsd-src/external/gpl3/gcc.old/dist/libsanitizer/tsan/ |
tsan_fd.cc:
    51   atomic_store(&s->rc, 1, memory_order_relaxed);  in allocsync()
    56   if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)  in ref()
    57   atomic_fetch_add(&s->rc, 1, memory_order_relaxed);  in ref()
    62   if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {  in unref()
    121  atomic_store(&fdctx.globsync.rc, (u64)-1, memory_order_relaxed);  in FdInit()
    122  atomic_store(&fdctx.filesync.rc, (u64)-1, memory_order_relaxed);  in FdInit()
    123  atomic_store(&fdctx.socksync.rc, (u64)-1, memory_order_relaxed);  in FdInit()
    131  FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  in FdOnFork()
    143  FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  in FdLocation()
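allocsync/ref/unref show a reference count with a sentinel value ((u64)-1) marking objects that are never freed, so ref and unref skip them. A sketch of that shape: the sentinel check and the increment can be relaxed, while the decrement that may actually free the object is shown here with acq_rel so the freeing thread sees all prior writes to it (the fragments above do not show which ordering the real unref path uses; names are illustrative):

#include <atomic>
#include <cstdint>

constexpr uint64_t kImmortal = ~0ull;  // counterpart of the (u64)-1 sentinel

struct SyncObj {
  std::atomic<uint64_t> rc{1};
};

void Ref(SyncObj *s) {
  if (s && s->rc.load(std::memory_order_relaxed) != kImmortal)
    s->rc.fetch_add(1, std::memory_order_relaxed);  // increments need only atomicity
}

void Unref(SyncObj *s) {
  if (s && s->rc.load(std::memory_order_relaxed) != kImmortal) {
    // acq_rel: release this thread's writes and acquire them if we end up freeing.
    if (s->rc.fetch_sub(1, std::memory_order_acq_rel) == 1)
      delete s;
  }
}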
tsan_external.cc:
    30  if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;  in GetTagData()
    49  uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);  in TagFromShadowStackFrame()
    60  CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  in ExternalAccess()
    75  uptr new_tag = atomic_fetch_add(&used_tags, 1, memory_order_relaxed);  in __tsan_external_register_tag()
    98  CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  in __tsan_external_assign_tag()
/netbsd-src/sys/external/bsd/compiler_rt/dist/lib/tsan/rtl/ |
tsan_fd.cc:
    53   atomic_store(&s->rc, 1, memory_order_relaxed);  in allocsync()
    58   if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)  in ref()
    59   atomic_fetch_add(&s->rc, 1, memory_order_relaxed);  in ref()
    64   if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {  in unref()
    123  atomic_store(&fdctx.globsync.rc, (u64)-1, memory_order_relaxed);  in FdInit()
    124  atomic_store(&fdctx.filesync.rc, (u64)-1, memory_order_relaxed);  in FdInit()
    125  atomic_store(&fdctx.socksync.rc, (u64)-1, memory_order_relaxed);  in FdInit()
    133  FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  in FdOnFork()
    145  FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  in FdLocation()
tsan_external.cc:
    32   if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;  in GetTagData()
    51   uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);  in TagFromShadowStackFrame()
    62   CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  in ExternalAccess()
    77   uptr new_tag = atomic_fetch_add(&used_tags, 1, memory_order_relaxed);  in __tsan_external_register_tag()
    100  CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));  in __tsan_external_assign_tag()