
Searched refs:memory_order_relaxed (Results 1 – 25 of 119) sorted by relevance


/freebsd-src/contrib/llvm-project/llvm/include/llvm/ADT/
Statistic.h
68 uint64_t getValue() const { return Value.load(std::memory_order_relaxed); } in getValue()
74 Value.store(Val, std::memory_order_relaxed);
79 Value.fetch_add(1, std::memory_order_relaxed);
85 return Value.fetch_add(1, std::memory_order_relaxed);
89 Value.fetch_sub(1, std::memory_order_relaxed);
95 return Value.fetch_sub(1, std::memory_order_relaxed);
101 Value.fetch_add(V, std::memory_order_relaxed);
108 Value.fetch_sub(V, std::memory_order_relaxed);
113 uint64_t PrevMax = Value.load(std::memory_order_relaxed); in updateMax()
117 PrevMax, V, std::memory_order_relaxed)) { in updateMax()
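The Statistic.h matches above show LLVM's pattern for per-statistic counters: every read, increment, and max-update uses relaxed ordering because the counter is only a tally, not a synchronization point. A minimal standalone sketch of that pattern (RelaxedCounter, increment, and updateMax are illustrative names, not LLVM's API):

    #include <atomic>
    #include <cstdint>

    class RelaxedCounter {
      std::atomic<uint64_t> Value{0};

    public:
      uint64_t get() const { return Value.load(std::memory_order_relaxed); }

      void increment(uint64_t V = 1) {
        Value.fetch_add(V, std::memory_order_relaxed);
      }

      // Keep the largest value observed so far. A failed compare_exchange_weak
      // refreshes PrevMax with the current value, and the loop exits once the
      // published maximum is already >= V.
      void updateMax(uint64_t V) {
        uint64_t PrevMax = Value.load(std::memory_order_relaxed);
        while (V > PrevMax &&
               !Value.compare_exchange_weak(PrevMax, V,
                                            std::memory_order_relaxed)) {
          // PrevMax was updated by the failed CAS; re-check the condition.
        }
      }
    };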
/freebsd-src/contrib/llvm-project/compiler-rt/lib/sanitizer_common/
sanitizer_addrhashmap.h
234 if (atomic_load(&b->add, memory_order_relaxed)) { in acquire()
236 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed); in acquire()
239 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
255 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
267 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed); in acquire()
271 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
295 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
310 atomic_store(&b->add, (uptr)add, memory_order_relaxed); in acquire()
322 atomic_store(&b->add, (uptr)add1, memory_order_relaxed); in acquire()
328 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0); in acquire()
[all …]
sanitizer_atomic_clang_other.h
sanitizer_atomic_clang_x86.h
sanitizer_mutex.h
26 atomic_store(&state_, 0, memory_order_relaxed); in Init()
44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1); in CheckLocked()
186 state = atomic_load(&state_, memory_order_relaxed); in Lock()
206 state = atomic_load(&state_, memory_order_relaxed); in Lock()
270 state = atomic_load(&state_, memory_order_relaxed); in ReadLock()
286 state = atomic_load(&state_, memory_order_relaxed); in ReadLock()
317 CHECK(atomic_load(&state_, memory_order_relaxed) & kWriterLock); in CheckWriteLocked()
323 CHECK(atomic_load(&state_, memory_order_relaxed) & kReaderLockMask); in CheckReadLocked()
sanitizer_allocator_stats.h
30 atomic_fetch_add(&stats_[i], v, memory_order_relaxed); in Add()
34 atomic_fetch_sub(&stats_[i], v, memory_order_relaxed); in Sub()
38 atomic_store(&stats_[i], v, memory_order_relaxed); in Set()
42 return atomic_load(&stats_[i], memory_order_relaxed); in Get()
sanitizer_atomic_clang_mips.h
sanitizer_atomic.h
25 memory_order_relaxed = __ATOMIC_RELAXED,
32 memory_order_relaxed = 1 << 0,
88 return atomic_load(a, memory_order_relaxed);
93 atomic_store(a, v, memory_order_relaxed);
21 memory_order_relaxed = 1 << 0, enumerator (global scope)
sanitizer_allocator.cpp
40 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) == in internal_allocator()
190 return atomic_load(&allocator_may_return_null, memory_order_relaxed); in AllocatorMayReturnNull()
195 memory_order_relaxed); in SetAllocatorMayReturnNull()
206 return atomic_load(&rss_limit_exceeded, memory_order_relaxed); in IsRssLimitExceeded()
210 atomic_store(&rss_limit_exceeded, limit_exceeded, memory_order_relaxed); in SetRssLimitExceeded()
sanitizer_lfstack.h
28 atomic_store(&head_, 0, memory_order_relaxed); in Clear()
32 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0; in Empty()
36 u64 cmp = atomic_load(&head_, memory_order_relaxed); in Push()
sanitizer_tls_get_addr.cpp
43 atomic_fetch_sub(&number_of_live_dtls, 1, memory_order_relaxed); in DTLS_Deallocate()
62 atomic_fetch_add(&number_of_live_dtls, 1, memory_order_relaxed); in DTLS_NextBlock()
128 atomic_load(&number_of_live_dtls, memory_order_relaxed)); in DTLS_on_tls_get_addr()
164 return atomic_load(&dtls->dtv_block, memory_order_relaxed) == in DTLSInDestruction()
sanitizer_atomic_msvc.h
73 DCHECK(mo == memory_order_relaxed || mo == memory_order_consume || in atomic_load()
78 if (mo == memory_order_relaxed) { in atomic_load()
90 DCHECK(mo == memory_order_relaxed || mo == memory_order_release || in atomic_store()
94 if (mo == memory_order_relaxed) { in atomic_store()
/freebsd-src/contrib/llvm-project/compiler-rt/lib/tsan/rtl/
tsan_fd.cpp
59 atomic_store(&s->rc, 1, memory_order_relaxed); in allocsync()
64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) in ref()
65 atomic_fetch_add(&s->rc, 1, memory_order_relaxed); in ref()
70 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) { in unref()
112 atomic_load(&d->aux_sync, memory_order_relaxed))); in init()
113 atomic_store(&d->aux_sync, 0, memory_order_relaxed); in init()
140 atomic_store(&fdctx.globsync.rc, (u64)-1, memory_order_relaxed); in FdInit()
141 atomic_store(&fdctx.filesync.rc, (u64)-1, memory_order_relaxed); in FdInit()
142 atomic_store(&fdctx.socksync.rc, (u64)-1, memory_order_relaxed); in FdInit()
150 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed); in FdOnFork()
[all …]
tsan_external.cpp
35 if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr; in GetTagData()
50 uptr tag_count = atomic_load(&used_tags, memory_order_relaxed); in TagFromShadowStackFrame()
69 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed)); in ExternalAccess()
85 uptr new_tag = atomic_fetch_add(&used_tags, 1, memory_order_relaxed); in __tsan_external_register_tag()
108 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed)); in __tsan_external_assign_tag()
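The tsan_external.cpp matches hand out external tags with a relaxed fetch_add on used_tags: the operation only needs to be atomic so each caller receives a distinct value, and no other data is published through the counter. A hedged sketch of the same idea (register_tag and the starting value are illustrative, not TSan's API):

    #include <atomic>
    #include <cstdint>

    // Unique-ID allocation: atomicity gives uniqueness; relaxed ordering is
    // enough because the counter itself does not guard any other memory.
    static std::atomic<std::uint64_t> used_tags{1};  // assumed first tag

    std::uint64_t register_tag() {
      return used_tags.fetch_add(1, std::memory_order_relaxed);
    }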
/freebsd-src/contrib/llvm-project/compiler-rt/lib/orc/
debug.cpp
37 DebugTypes.store(&DebugTypesAll, std::memory_order_relaxed); in initializeDebug()
44 DebugTypes.store(DT, std::memory_order_relaxed); in initializeDebug()
50 DebugTypes.store(&DebugTypesNone, std::memory_order_relaxed); in initializeDebug()
/freebsd-src/contrib/llvm-project/libcxx/include/__stop_token/
atomic_unique_lock.h
98 _State __current_state = __state_.load(std::memory_order_relaxed); in __lock_impl()
106 __state_.wait(__current_state, std::memory_order_relaxed); in __lock_impl()
110 __current_state = __state_.load(std::memory_order_relaxed); in __lock_impl()
127 …std::memory_order_relaxed // fail to exchange order. We don't need any ordering as we are going ba… in __lock_impl()
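The atomic_unique_lock.h matches poll and wait on the lock state with relaxed loads, and the comment at line 127 notes that the compare-exchange failure ordering can also be relaxed, since a failed attempt observes no protected data. A small test-and-test-and-set sketch under those assumptions (SpinLock and its members are illustrative names, not libc++'s):

    #include <atomic>

    class SpinLock {
      std::atomic<bool> locked_{false};

    public:
      void lock() {
        for (;;) {
          bool expected = false;
          // Acquire only on the successful exchange; relaxed on failure.
          if (locked_.compare_exchange_weak(expected, true,
                                            std::memory_order_acquire,
                                            std::memory_order_relaxed))
            return;
          // Poll with relaxed loads before retrying the CAS; a real
          // implementation would back off or use atomic wait/notify.
          while (locked_.load(std::memory_order_relaxed)) {
          }
        }
      }

      void unlock() { locked_.store(false, std::memory_order_release); }
    };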
stop_state.h
73 __state_.load(std::memory_order_relaxed) <= static_cast<__state_t>(~(1 << __stop_source_counter_shift)), in __increment_stop_source_counter()
75 __state_.fetch_add(1 << __stop_source_counter_shift, std::memory_order_relaxed); in __increment_stop_source_counter()
82 __state_.load(std::memory_order_relaxed) >= static_cast<__state_t>(1 << __stop_source_counter_shift), in __decrement_stop_source_counter()
84 __state_.fetch_sub(1 << __stop_source_counter_shift, std::memory_order_relaxed); in __decrement_stop_source_counter()
97 // Todo: Can this be std::memory_order_relaxed as the standard does not say anything except not to introduce data in __stop_possible_for_stop_token()
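The stop_state.h matches keep a stop-source counter packed into the upper bits of a single atomic state word and adjust it with relaxed fetch_add/fetch_sub, because the count alone does not publish any other data. A hedged sketch of that packing (the shift value and names are illustrative, not libc++'s layout):

    #include <atomic>
    #include <cstdint>

    constexpr std::uint32_t counter_shift = 24;           // assumed layout
    constexpr std::uint32_t counter_unit  = 1u << counter_shift;

    std::atomic<std::uint32_t> state{0};  // low bits: flags, high bits: counter

    void increment_counter() {
      state.fetch_add(counter_unit, std::memory_order_relaxed);
    }

    void decrement_counter() {
      state.fetch_sub(counter_unit, std::memory_order_relaxed);
    }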
/freebsd-src/crypto/openssl/include/internal/
tsan_assist.h
57 # define tsan_load(ptr) atomic_load_explicit((ptr), memory_order_relaxed)
58 # define tsan_store(ptr, val) atomic_store_explicit((ptr), (val), memory_order_relaxed)
59 # define tsan_counter(ptr) atomic_fetch_add_explicit((ptr), 1, memory_order_relaxed)
60 # define tsan_decr(ptr) atomic_fetch_add_explicit((ptr), -1, memory_order_relaxed)
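OpenSSL's tsan_assist.h wraps the C11 atomic_*_explicit operations with relaxed ordering so that simple shared counters are race-free without imposing any synchronization. Roughly the same thing expressed with std::atomic (a sketch with invented *_cxx names, not OpenSSL code):

    #include <atomic>
    #include <cstddef>

    inline std::size_t tsan_load_cxx(const std::atomic<std::size_t> &p) {
      return p.load(std::memory_order_relaxed);
    }
    inline void tsan_store_cxx(std::atomic<std::size_t> &p, std::size_t v) {
      p.store(v, std::memory_order_relaxed);
    }
    inline std::size_t tsan_counter_cxx(std::atomic<std::size_t> &p) {
      return p.fetch_add(1, std::memory_order_relaxed);
    }
    // fetch_sub(1) is equivalent to the fetch_add(ptr, -1) used by tsan_decr.
    inline std::size_t tsan_decr_cxx(std::atomic<std::size_t> &p) {
      return p.fetch_sub(1, std::memory_order_relaxed);
    }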
/freebsd-src/contrib/llvm-project/libcxx/include/
barrier
132 auto const __old_phase = __phase_.load(memory_order_relaxed);
136 __expected_ += __expected_adjustment_.load(memory_order_relaxed);
137 __expected_adjustment_.store(0, memory_order_relaxed);
148 __expected_adjustment_.fetch_sub(1, memory_order_relaxed);
183 auto const __old_phase = __phase.load(memory_order_relaxed);
185 auto const new_expected = __expected.load(memory_order_relaxed);
192 __arrived.store(new_expected, memory_order_relaxed);
202 __expected.fetch_sub(1, memory_order_relaxed);
236 __phase_arrived_expected.fetch_add((__old & __expected_mask) << 32, memory_order_relaxed);
249 __phase_arrived_expected.fetch_add(__expected_unit, memory_order_relaxed);
[all …]
/freebsd-src/contrib/llvm-project/compiler-rt/lib/scudo/standalone/
options.h
53 memory_order_relaxed); in clear()
57 atomic_fetch_or(&Val, 1U << static_cast<u32>(Opt), memory_order_relaxed); in set()
68 memory_order_relaxed)); in setFillContentsMode()
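The scudo options.h matches set and clear individual option bits with relaxed atomic_fetch_or/atomic_fetch_and; readers only need a best-effort view of the flags, so no ordering is required. A hedged sketch of the same bit-flag pattern (OptionBits and its members are illustrative names):

    #include <atomic>
    #include <cstdint>

    struct OptionBits {
      std::atomic<std::uint32_t> Val{0};

      void set(unsigned Opt)   { Val.fetch_or(1u << Opt, std::memory_order_relaxed); }
      void clear(unsigned Opt) { Val.fetch_and(~(1u << Opt), std::memory_order_relaxed); }
      bool get(unsigned Opt) const {
        return Val.load(std::memory_order_relaxed) & (1u << Opt);
      }
    };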
atomic_helpers.h
17 memory_order_relaxed = 0, enumerator
24 static_assert(memory_order_relaxed == __ATOMIC_RELAXED, "");
127 return atomic_load(A, memory_order_relaxed); in atomic_load_relaxed()
132 atomic_store(A, V, memory_order_relaxed); in atomic_store_relaxed()
/freebsd-src/tools/test/stress2/misc/
vm_reserv_populate.sh
97 memory_order_relaxed = 1 << 0,
115 assert(mo & (memory_order_relaxed | memory_order_consume
122 if (mo == memory_order_relaxed) {
/freebsd-src/contrib/llvm-project/lldb/source/Utility/
Log.cpp
99 MaskType mask = m_mask.fetch_or(flags, std::memory_order_relaxed); in Enable()
101 m_options.store(options, std::memory_order_relaxed); in Enable()
103 m_channel.log_ptr.store(this, std::memory_order_relaxed); in Enable()
110 MaskType mask = m_mask.fetch_and(~flags, std::memory_order_relaxed); in Disable()
113 m_channel.log_ptr.store(nullptr, std::memory_order_relaxed); in Disable()
128 return m_options.load(std::memory_order_relaxed); in GetOptions()
132 return m_mask.load(std::memory_order_relaxed); in GetMask()
315 return m_options.load(std::memory_order_relaxed) & LLDB_LOG_OPTION_VERBOSE; in GetVerbose()
/freebsd-src/contrib/llvm-project/libcxx/include/__atomic/
memory_order.h
43 inline constexpr auto memory_order_relaxed = memory_order::relaxed; variable
53 memory_order_relaxed = __mo_relaxed, enumerator
/freebsd-src/contrib/llvm-project/openmp/runtime/src/
kmp_lock.cpp
609 std::memory_order_relaxed) - in __kmp_get_ticket_lock_owner()
615 std::memory_order_relaxed) != -1; in __kmp_is_ticket_lock_nestable()
627 &lck->lk.next_ticket, 1U, std::memory_order_relaxed); in __kmp_acquire_ticket_lock_timed_template()
631 std::memory_order_relaxed) != my_ticket) in __kmp_acquire_ticket_lock_timed_template()
654 std::memory_order_relaxed)) { in __kmp_acquire_ticket_lock_with_checks()
670 std::memory_order_relaxed); in __kmp_acquire_ticket_lock_with_checks()
676 std::memory_order_relaxed); in __kmp_test_ticket_lock()
679 std::memory_order_relaxed) == my_ticket) { in __kmp_test_ticket_lock()
695 std::memory_order_relaxed)) { in __kmp_test_ticket_lock_with_checks()
709 std::memory_order_relaxed); in __kmp_test_ticket_lock_with_checks()
[all …]
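The kmp_lock.cpp matches implement a ticket lock whose counters are mostly accessed with relaxed atomics; the OpenMP runtime supplies the necessary ordering elsewhere. A standalone sketch of the same lock shape, hedged to be correct on its own by using acquire on the spin load and release on unlock (TicketLock and its members are illustrative names, not the runtime's):

    #include <atomic>

    class TicketLock {
      std::atomic<unsigned> next_ticket{0};
      std::atomic<unsigned> now_serving{0};

    public:
      void lock() {
        // Taking a ticket only needs atomicity; the ordering we rely on
        // comes from the acquire load while spinning.
        unsigned my_ticket = next_ticket.fetch_add(1, std::memory_order_relaxed);
        while (now_serving.load(std::memory_order_acquire) != my_ticket) {
          // spin (a real implementation would pause or back off)
        }
      }

      void unlock() {
        // Only the lock holder writes now_serving, so a relaxed read of our
        // own value plus a release store is sufficient.
        now_serving.store(now_serving.load(std::memory_order_relaxed) + 1,
                          std::memory_order_release);
      }
    };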
