// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//                        Kokkos v. 4.0
//       Copyright (2022) National Technology & Engineering
//               Solutions of Sandia, LLC (NTESS).
//
// Under the terms of Contract DE-NA0003525 with NTESS,
// the U.S. Government retains certain rights in this software.
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_REF_H
#define _LIBCPP___ATOMIC_ATOMIC_REF_H

#include <__assert>
#include <__atomic/atomic_sync.h>
#include <__atomic/check_memory_order.h>
#include <__atomic/to_gcc_order.h>
#include <__concepts/arithmetic.h>
#include <__concepts/same_as.h>
#include <__config>
#include <__memory/addressof.h>
#include <__type_traits/has_unique_object_representation.h>
#include <__type_traits/is_trivially_copyable.h>
#include <cstddef>
#include <cstdint>
#include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_PUSH_MACROS
#include <__undef_macros>

_LIBCPP_BEGIN_NAMESPACE_STD

#if _LIBCPP_STD_VER >= 20

template <class _Tp>
struct __atomic_ref_base {
protected:
  _Tp* __ptr_;

  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __ptr_(std::addressof(__obj)) {}

private:
  _LIBCPP_HIDE_FROM_ABI static _Tp* __clear_padding(_Tp& __val) noexcept {
    _Tp* __ptr = std::addressof(__val);
#  if __has_builtin(__builtin_clear_padding)
    __builtin_clear_padding(__ptr);
#  endif
    return __ptr;
  }

  _LIBCPP_HIDE_FROM_ABI static bool __compare_exchange(
      _Tp* __ptr, _Tp* __expected, _Tp* __desired, bool __is_weak, int __success, int __failure) noexcept {
    if constexpr (
#  if __has_builtin(__builtin_clear_padding)
        has_unique_object_representations_v<_Tp> || floating_point<_Tp>
#  else
        true // NOLINT(readability-simplify-boolean-expr)
#  endif
    ) {
      return __atomic_compare_exchange(__ptr, __expected, __desired, __is_weak, __success, __failure);
    } else { // _Tp has padding bits and __builtin_clear_padding is available
      __clear_padding(*__desired);
      _Tp __copy = *__expected;
      __clear_padding(__copy);
      // The algorithm here is to retry `__atomic_compare_exchange` until it either succeeds, or
      // fails because the value representations of the objects involved differ. We loop around
      // __atomic_compare_exchange because a failure may be caused by a mismatch in the padding
      // bits alone (i.e. in the object representation only), which does not count as a failure
      // in terms of the value representation.
      while (true) {
        _Tp __prev = __copy;
        if (__atomic_compare_exchange(__ptr, std::addressof(__copy), __desired, __is_weak, __success, __failure)) {
          return true;
        }
        _Tp __curr = __copy;
        if (std::memcmp(__clear_padding(__prev), __clear_padding(__curr), sizeof(_Tp)) != 0) {
          // The value representations (ignoring padding bits) do not compare equal ->
          // write the value currently at *__ptr into *__expected
          std::memcpy(__expected, std::addressof(__copy), sizeof(_Tp));
          return false;
        }
      }
    }
  }
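  // Illustrative example (hypothetical type, for exposition only): consider a type with
  // internal padding, e.g.
  //
  //   struct _Padded { char __c; int __i; }; // typically 3 padding bytes after __c
  //
  // Two _Padded objects whose members compare equal may still have different object
  // representations because of their padding bytes, so a raw __atomic_compare_exchange
  // could fail spuriously. The loop above clears the padding before comparing and only
  // reports failure once the padding-free representations actually differ.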

  friend struct __atomic_waitable_traits<__atomic_ref_base<_Tp>>;

public:
  using value_type = _Tp;

  static constexpr size_t required_alignment = alignof(_Tp);

  // The __atomic_always_lock_free builtin takes into account the alignment of the pointer if provided,
  // so we create a fake pointer with a suitable alignment when querying it. Note that we are guaranteed
  // that the pointer is going to be aligned properly at runtime because that is a (checked) precondition
  // of atomic_ref's constructor.
  static constexpr bool is_always_lock_free =
      __atomic_always_lock_free(sizeof(_Tp), reinterpret_cast<void*>(-required_alignment));
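  // For example, with required_alignment == 4 the fake pointer above is
  // reinterpret_cast<void*>(-4), i.e. an address with its two low bits clear,
  // which the builtin treats as a 4-byte-aligned pointer.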

  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __atomic_is_lock_free(sizeof(_Tp), __ptr_); }

  _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic store operation is invalid");
    __atomic_store(__ptr_, __clear_padding(__desired), std::__to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept {
    store(__desired);
    return __desired;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic load operation is invalid");
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_load(__ptr_, __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  _LIBCPP_HIDE_FROM_ABI operator _Tp() const noexcept { return load(); }

  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_exchange(__ptr_, __clear_padding(__desired), __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to weak atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to strong atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }

  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }
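  // Typical caller-side pattern (illustrative only): on failure, __expected is updated with
  // the value currently stored, so a read-modify-write loop needs no separate reload:
  //
  //   int __v = 0;
  //   std::atomic_ref<int> __r(__v);
  //   int __old = __r.load(std::memory_order_relaxed);
  //   while (!__r.compare_exchange_weak(__old, __old * 2)) {
  //     // __old now holds the freshly observed value; just retry.
  //   }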

  _LIBCPP_HIDE_FROM_ABI void wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_WAIT_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic wait operation is invalid");
    std::__atomic_wait(*this, __old, __order);
  }
  _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept { std::__atomic_notify_one(*this); }
  _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept { std::__atomic_notify_all(*this); }
};
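
// Minimal wait/notify sketch (illustrative only): one thread blocks until another
// changes the value and notifies.
//
//   int __flag = 0;
//   std::atomic_ref<int> __r(__flag);
//   // Thread 1:
//   __r.wait(0);      // blocks while the value still equals 0
//   // Thread 2:
//   __r.store(1);
//   __r.notify_one();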

template <class _Tp>
struct __atomic_waitable_traits<__atomic_ref_base<_Tp>> {
  static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_ref_base<_Tp>& __a, memory_order __order) {
    return __a.load(__order);
  }
  static _LIBCPP_HIDE_FROM_ABI const _Tp* __atomic_contention_address(const __atomic_ref_base<_Tp>& __a) {
    return __a.__ptr_;
  }
};

template <class _Tp>
struct atomic_ref : public __atomic_ref_base<_Tp> {
  static_assert(is_trivially_copyable_v<_Tp>, "std::atomic_ref<T> requires that 'T' be a trivially copyable type");

  using __base = __atomic_ref_base<_Tp>;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;
};
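
// Usage sketch for the primary template (illustrative only; _Point is a hypothetical type):
//
//   struct _Point { int __x; int __y; };   // trivially copyable
//   _Point __p{1, 2};
//   std::atomic_ref<_Point> __r(__p);      // __p must outlive the atomic_ref
//   __r.store(_Point{3, 4});
//   _Point __snapshot = __r.load();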

template <class _Tp>
  requires(std::integral<_Tp> && !std::same_as<bool, _Tp>)
struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
  using __base = __atomic_ref_base<_Tp>;

  using difference_type = typename __base::value_type;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_add(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_sub(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_and(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_or(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_xor(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) const noexcept { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() const noexcept { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() const noexcept { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __arg) const noexcept { return fetch_and(__arg) & __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __arg) const noexcept { return fetch_or(__arg) | __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __arg) const noexcept { return fetch_xor(__arg) ^ __arg; }
};
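
// Usage sketch for the integral specialization (illustrative only):
//
//   long __counter = 0;
//   std::atomic_ref<long> __r(__counter);
//   __r.fetch_add(5, std::memory_order_relaxed); // returns the previous value, 0
//   ++__r;                                       // returns the new value, 6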

template <class _Tp>
  requires std::floating_point<_Tp>
struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
  using __base = __atomic_ref_base<_Tp>;

  using difference_type = typename __base::value_type;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  // The __atomic_fetch_{add,sub} builtins do not support floating-point types,
  // so these operations are emulated with a compare-exchange loop.
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    _Tp __old = this->load(memory_order_relaxed);
    _Tp __new = __old + __arg;
    while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
      // On failure, __old has been refreshed with the current value; recompute __new.
      __new = __old + __arg;
    }
    return __old;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    _Tp __old = this->load(memory_order_relaxed);
    _Tp __new = __old - __arg;
    while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
      __new = __old - __arg;
    }
    return __old;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
};
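
// Usage sketch for the floating-point specialization (illustrative only):
//
//   double __sum = 0.0;
//   std::atomic_ref<double> __r(__sum);
//   __r.fetch_add(0.5); // CAS loop above; returns the previous value, 0.0
//   __r += 0.5;         // __sum is now 1.0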

template <class _Tp>
struct atomic_ref<_Tp*> : public __atomic_ref_base<_Tp*> {
  using __base = __atomic_ref_base<_Tp*>;

  using difference_type = ptrdiff_t;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp*& __ptr) : __base(__ptr) {}

  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  // The __atomic_fetch_{add,sub} builtins perform byte-wise arithmetic on pointers,
  // so the offset is scaled by sizeof(_Tp) to get object-wise arithmetic.
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_add(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_sub(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) const noexcept { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) const noexcept { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() const noexcept { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() const noexcept { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __arg) const noexcept { return fetch_sub(__arg) - __arg; }
};
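
// Usage sketch for the pointer specialization (illustrative only):
//
//   int __buf[4] = {0, 1, 2, 3};
//   int* __cursor = __buf;
//   std::atomic_ref<int*> __r(__cursor);
//   int* __prev = __r.fetch_add(2); // __prev == __buf; __cursor now points at __buf + 2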

_LIBCPP_CTAD_SUPPORTED_FOR_TYPE(atomic_ref);

#endif // _LIBCPP_STD_VER >= 20

_LIBCPP_END_NAMESPACE_STD

_LIBCPP_POP_MACROS

#endif // _LIBCPP___ATOMIC_ATOMIC_REF_H