Lines matching refs:memory_order in libc++'s <atomic> header; the number at the start of each entry is the line number within that header.

28  enum memory_order: unspecified // enum class in C++20
38 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
39 inline constexpr auto memory_order_consume = memory_order::consume;
40 inline constexpr auto memory_order_acquire = memory_order::acquire;
41 inline constexpr auto memory_order_release = memory_order::release;
42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
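
The aliases at lines 38-43 keep the pre-C++20 spellings valid once the enum becomes scoped. A minimal illustration using only the standard API (nothing libc++-specific assumed):

    #include <atomic>

    // Both spellings name the same constant, so C++17 call sites keep compiling
    // when memory_order becomes an enum class in C++20.
    static_assert(std::memory_order_acquire == std::memory_order::acquire, "");

    std::atomic<int> g{0};
    void publish()    { g.store(1, std::memory_order_release); }    // old spelling
    int  consume_it() { return g.load(std::memory_order::acquire); } // scoped spelling
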
77 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
78 T load(memory_order m = memory_order_seq_cst) const noexcept;
81 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
86 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
87 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
89 memory_order s, memory_order f) volatile noexcept;
90 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
92 memory_order s, memory_order f) volatile noexcept;
94 memory_order s, memory_order f) noexcept;
96 memory_order m = memory_order_seq_cst) volatile noexcept;
98 memory_order m = memory_order_seq_cst) noexcept;
100 memory_order m = memory_order_seq_cst) volatile noexcept;
102 memory_order m = memory_order_seq_cst) noexcept;
104 void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
105 void wait(T, memory_order = memory_order::seq_cst) const noexcept;
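
Taken together, the load/store/exchange/compare_exchange/wait members above form the generic atomic<T> interface. A small single-threaded usage sketch of those overloads (standard API only):

    #include <atomic>
    #include <cassert>

    std::atomic<int> value{0};

    void demo() {
      value.store(42, std::memory_order_release);        // store overloads above
      int seen = value.load(std::memory_order_acquire);  // load overloads above
      assert(seen == 42);

      int expected = 42;
      // Separate success/failure orderings, matching the two-order overloads above.
      bool swapped = value.compare_exchange_strong(expected, 43,
                                                   std::memory_order_acq_rel,
                                                   std::memory_order_acquire);
      assert(swapped && value.load() == 43);
    }
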
128 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
129 integral load(memory_order m = memory_order_seq_cst) const noexcept;
132 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
138 memory_order m = memory_order_seq_cst) volatile noexcept;
139 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141 memory_order s, memory_order f) volatile noexcept;
143 memory_order s, memory_order f) noexcept;
145 memory_order s, memory_order f) volatile noexcept;
147 memory_order s, memory_order f) noexcept;
149 memory_order m = memory_order_seq_cst) volatile noexcept;
151 memory_order m = memory_order_seq_cst) noexcept;
153 memory_order m = memory_order_seq_cst) volatile noexcept;
155 memory_order m = memory_order_seq_cst) noexcept;
157 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
159 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
161 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
162 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
163 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
164 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
165 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
187 void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
188 void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
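
The integral specialization adds the read-modify-write members listed above. The classic use is a statistics counter where only atomicity matters, so relaxed ordering suffices:

    #include <atomic>

    std::atomic<unsigned long> hits{0};

    // fetch_add needs no ordering with other data here: only the final count
    // is ever read, so memory_order_relaxed is enough.
    void record_hit()     { hits.fetch_add(1, std::memory_order_relaxed); }
    unsigned long total() { return hits.load(std::memory_order_relaxed); }
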
212 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
213 T* load(memory_order m = memory_order_seq_cst) const noexcept;
216 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
217 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
221 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
222 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
224 memory_order s, memory_order f) volatile noexcept;
226 memory_order s, memory_order f) noexcept;
228 memory_order s, memory_order f) volatile noexcept;
230 memory_order s, memory_order f) noexcept;
232 memory_order m = memory_order_seq_cst) volatile noexcept;
234 memory_order m = memory_order_seq_cst) noexcept;
236 memory_order m = memory_order_seq_cst) volatile noexcept;
238 memory_order m = memory_order_seq_cst) noexcept;
239 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
240 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
241 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
242 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
257 void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
258 void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
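
For the pointer specialization, fetch_add/fetch_sub take a ptrdiff_t and advance by whole elements, not bytes. For example, carving slots out of a fixed buffer:

    #include <atomic>

    int slots[64];
    std::atomic<int*> next{slots};

    // Each caller atomically claims one element; the pointer moves by one int
    // per increment because the operand is an element count (ptrdiff_t).
    int* claim_slot() { return next.fetch_add(1, std::memory_order_relaxed); }
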
277 memory_order) noexcept;
280 memory_order) noexcept;
286 T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
288 T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
295 memory_order) noexcept;
298 memory_order) noexcept;
314 memory_order, memory_order) noexcept;
318 memory_order, memory_order) noexcept;
322 memory_order, memory_order) noexcept;
326 memory_order, memory_order) noexcept;
334 memory_order) noexcept;
337 memory_order) noexcept;
344 memory_order) noexcept;
347 memory_order) noexcept;
354 memory_order) noexcept;
357 memory_order) noexcept;
364 memory_order) noexcept;
367 memory_order) noexcept;
374 memory_order) noexcept;
377 memory_order) noexcept;
385 memory_order) noexcept;
388 memory_order) noexcept;
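
The free `_explicit` functions mirror the members for code (or C-style interfaces) that works through an atomic<T>* rather than member calls, for instance:

    #include <atomic>

    std::atomic<long> counter{0};

    void via_free_functions() {
      std::atomic_store_explicit(&counter, 10L, std::memory_order_release);
      long v = std::atomic_load_explicit(&counter, std::memory_order_acquire);
      std::atomic_fetch_add_explicit(&counter, v, std::memory_order_relaxed);
    }
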
461 bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
462 bool test(memory_order m = memory_order_seq_cst) noexcept;
463 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
464 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
465 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
466 void clear(memory_order m = memory_order_seq_cst) noexcept;
468 void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
469 void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
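
atomic_flag's test/test_and_set/clear/wait members above are enough for a minimal spinlock; the C++20 wait/notify pair avoids burning CPU while the flag is held:

    #include <atomic>

    std::atomic_flag spin = ATOMIC_FLAG_INIT;   // since C++20, default-construction also clears it

    void locked_region() {
      while (spin.test_and_set(std::memory_order_acquire)) {
        spin.wait(true, std::memory_order_relaxed);   // block while the flag is still set
      }
      // ... critical section ...
      spin.clear(std::memory_order_release);
      spin.notify_one();
    }
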
479 memory_order m) noexcept;
480 bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
484 memory_order m) noexcept;
485 bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
488 void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
489 void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
493 void atomic_wait_explicit(const volatile atomic_flag* obj, T old, memory_order m) noexcept;
494 void atomic_wait_explicit(const atomic_flag* obj, T old, memory_order m) noexcept;
502 void atomic_thread_fence(memory_order m) noexcept;
503 void atomic_signal_fence(memory_order m) noexcept;
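
atomic_thread_fence lets relaxed operations synchronize through a separate fence pair; the standard release/acquire publication pattern looks like this:

    #include <atomic>

    int payload = 0;                    // ordinary data, published via the flag below
    std::atomic<bool> ready{false};

    void producer() {
      payload = 123;
      std::atomic_thread_fence(std::memory_order_release);  // orders the write above...
      ready.store(true, std::memory_order_relaxed);
    }

    void consumer() {
      while (!ready.load(std::memory_order_relaxed)) { /* spin */ }
      std::atomic_thread_fence(std::memory_order_acquire);  // ...with this fence
      int v = payload;                                      // guaranteed to read 123
      (void)v;
    }
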
575 // Figure out what the underlying type for `memory_order` would be if it were
591 enum class memory_order : __memory_order_underlying_t {
600 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
601 inline constexpr auto memory_order_consume = memory_order::consume;
602 inline constexpr auto memory_order_acquire = memory_order::acquire;
603 inline constexpr auto memory_order_release = memory_order::release;
604 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
605 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
609 typedef enum memory_order {
616 } memory_order;
625 static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
626 "unexpected underlying type for std::memory_order");
667 _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
677 _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order)…
700 void __cxx_atomic_thread_fence(memory_order __order) {
705 void __cxx_atomic_signal_fence(memory_order __order) {
712 memory_order __order) {
720 memory_order __order) {
728 memory_order __order) {
737 _Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
747 _Tp __value, memory_order __order) {
757 memory_order __order) {
768 memory_order __success, memory_order __failure) {
778 __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
779 memory_order __failure) {
790 memory_order __success, memory_order __failure) {
800 __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
801 memory_order __failure) {
824 _Td __delta, memory_order __order) {
832 memory_order __order) {
840 _Td __delta, memory_order __order) {
848 memory_order __order) {
856 _Tp __pattern, memory_order __order) {
864 _Tp __pattern, memory_order __order) {
872 _Tp __pattern, memory_order __order) {
880 memory_order __order) {
888 _Tp __pattern, memory_order __order) {
896 memory_order __order) {
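
On the GCC-builtin path (the __cxx_atomic_* overloads from lines 667-896), each operation boils down to an __atomic_* builtin plus the memory_order-to-__ATOMIC_* translation that __to_gcc_order performs. A simplified, free-standing sketch of that shape (the names and the omitted volatile/impl-wrapper details are my own simplification, not the header's):

    #include <atomic>

    // Stand-in for __to_gcc_order: translate std::memory_order into the
    // compiler's __ATOMIC_* constants expected by the __atomic_* builtins.
    static int gcc_order(std::memory_order m) {
      switch (m) {
        case std::memory_order_relaxed: return __ATOMIC_RELAXED;
        case std::memory_order_consume: return __ATOMIC_CONSUME;
        case std::memory_order_acquire: return __ATOMIC_ACQUIRE;
        case std::memory_order_release: return __ATOMIC_RELEASE;
        case std::memory_order_acq_rel: return __ATOMIC_ACQ_REL;
        default:                        return __ATOMIC_SEQ_CST;
      }
    }

    template <class T>
    T cxx_atomic_load(const T* addr, std::memory_order order) {
      return __atomic_load_n(addr, gcc_order(order));
    }

    template <class T>
    void cxx_atomic_store(T* addr, T value, std::memory_order order) {
      __atomic_store_n(addr, value, gcc_order(order));
    }

    template <class T>
    T cxx_atomic_fetch_add(T* addr, T delta, std::memory_order order) {
      return __atomic_fetch_add(addr, delta, gcc_order(order));
    }
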
922 void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
927 void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
944 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order)…
949 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCE…
955 _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXC…
961 _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
968 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __or…
973 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NO…
977 _LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __o…
986 …se_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __f…
991 …tomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __f…
997 …se_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __f…
1002 …tomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __f…
1008 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __o…
1013 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _N…
1019 …ch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXC…
1024 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __o…
1030 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __o…
1035 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _N…
1040 …ch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXC…
1045 _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __o…
1051 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order _…
1056 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) …
1062 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __…
1067 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _…
1073 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order _…
1078 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) …
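
On the Clang/C11-builtin path (lines 922-1078) the same operations route through the __c11_atomic_* builtins; the extra __to_failure_order helper at line 977 exists because a compare_exchange failure ordering may not contain a release component. A plausible stand-in for it (hedged, not the verbatim header):

    #include <atomic>

    constexpr std::memory_order to_failure_order(std::memory_order m) {
      return m == std::memory_order_release ? std::memory_order_relaxed
           : m == std::memory_order_acq_rel ? std::memory_order_acquire
           : m;
    }

    // acq_rel demotes to acquire, release demotes to relaxed; everything else passes through.
    static_assert(to_failure_order(std::memory_order_acq_rel) == std::memory_order_acquire, "");
    static_assert(to_failure_order(std::memory_order_seq_cst) == std::memory_order_seq_cst, "");
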
1184 void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
1191 void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
1199 _Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1204 _Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1210 _Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1220 _Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1231 … _Tp* __expected, _Tp __value, memory_order, memory_order) {
1246 … _Tp* __expected, _Tp __value, memory_order, memory_order) {
1260 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1275 _Tp* __expected, _Tp __value, memory_order, memory_order) {
1289 _Td __delta, memory_order) {
1300 _Td __delta, memory_order) {
1311 ptrdiff_t __delta, memory_order) {
1322 ptrdiff_t __delta, memory_order) {
1333 _Td __delta, memory_order) {
1344 _Td __delta, memory_order) {
1355 _Tp __pattern, memory_order) {
1366 _Tp __pattern, memory_order) {
1377 _Tp __pattern, memory_order) {
1388 _Tp __pattern, memory_order) {
1399 _Tp __pattern, memory_order) {
1410 _Tp __pattern, memory_order) {
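
The __cxx_atomic_lock_impl overloads above (lines 1184-1410) are the fallback for types with no native lock-free support: every operation takes a per-object spinlock, and the memory_order argument is accepted but unused because the lock's acquire/release already orders all accesses to that object. A rough, self-contained sketch of that idea (the type and member names here are hypothetical, not libc++'s):

    #include <atomic>

    template <class T>
    struct locked_atomic {                       // hypothetical name
      mutable std::atomic_flag guard;            // the per-object spinlock
      T value;

      explicit locked_atomic(T initial = T()) : value(initial) { guard.clear(); }

      void lock() const   { while (guard.test_and_set(std::memory_order_acquire)) {} }
      void unlock() const { guard.clear(std::memory_order_release); }

      T load(std::memory_order /*ignored*/) const {
        lock(); T tmp = value; unlock(); return tmp;
      }
      void store(T desired, std::memory_order /*ignored*/) {
        lock(); value = desired; unlock();
      }
      bool compare_exchange_strong(T& expected, T desired,
                                   std::memory_order, std::memory_order) {
        lock();
        bool equal = (value == expected);   // libc++ compares object bytes instead
        if (equal) value = desired; else expected = value;
        unlock();
        return equal;
      }
    };
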
1503 memory_order __order;
1512 _LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
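
The __cxx_atomic_wait lines above belong to the polling/backoff machinery behind C++20 atomic waiting; at the user level the same facility looks like this (standard API only):

    #include <atomic>
    #include <thread>

    std::atomic<int> stage{0};

    void waiter() {
      // Blocks (rather than spinning) while stage still equals 0.
      stage.wait(0, std::memory_order_acquire);
    }

    int main() {
      std::thread t(waiter);
      stage.store(1, std::memory_order_release);
      stage.notify_one();                        // wakes the waiter if it is blocked
      t.join();
    }
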
1536 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1540 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1544 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1548 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1556 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1559 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1563 memory_order __s, memory_order __f) volatile _NOEXCEPT
1568 memory_order __s, memory_order __f) _NOEXCEPT
1573 memory_order __s, memory_order __f) volatile _NOEXCEPT
1578 memory_order __s, memory_order __f) _NOEXCEPT
1583 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1587 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1591 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1595 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1598 …_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_o…
1600 …_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_o…
1645 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1648 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1651 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1654 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1657 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1660 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1663 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1666 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1669 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1672 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1769 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1776 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1783 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
1790 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
1884 atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order
1893 atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NO…
1922 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1931 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1960 …plicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1968 atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) …
2016 memory_order __s, memory_order __f) _NOEXCEPT
2026 memory_order __s, memory_order __f) _NOEXCEPT
2039 memory_order __s, memory_order __f) _NOEXCEPT
2050 memory_order __s, memory_order __f) _NOEXCEPT
2080 memory_order __m) _NOEXCEPT
2090 memory_order __m) _NOEXCEPT
2148 …(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2155 …_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2180 …(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2187 …_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2227 …licit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2239 atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m…
2279 …licit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2291 atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m)…
2331 …licit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2343 atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m…
2355 bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2358 bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2362 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2365 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2368 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2371 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2375 void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2378 void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2427 atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2434 atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2455 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2462 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2483 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2490 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2512 bool __v, memory_order __m) _NOEXCEPT
2520 bool __v, memory_order __m) _NOEXCEPT
2557 atomic_thread_fence(memory_order __m) _NOEXCEPT
2564 atomic_signal_fence(memory_order __m) _NOEXCEPT