/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2023 Microsoft Corporation
 */

#ifndef RTE_STDATOMIC_H
#define RTE_STDATOMIC_H

#include <assert.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifdef RTE_ENABLE_STDATOMIC
#ifndef _MSC_VER
#ifdef __STDC_NO_ATOMICS__
#error enable_stdatomic=true but atomics not supported by toolchain
#endif
#endif

#include <stdatomic.h>

/* RTE_ATOMIC(type) is provided for use as a type specifier
 * permitting designation of an rte atomic type.
 */
#define RTE_ATOMIC(type) _Atomic(type)

/* __rte_atomic is provided for type qualification permitting
 * designation of an rte atomic qualified type-name.
 */
#define __rte_atomic _Atomic
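
/* Illustrative usage only (not part of this header): hypothetical
 * counters declared with either form. Both spellings map to _Atomic
 * here and to plain (non-atomic) types when RTE_ENABLE_STDATOMIC is
 * not set, so code written this way builds in both modes.
 *
 *   static RTE_ATOMIC(uint64_t) pkt_count;   // via the type specifier
 *   static uint64_t __rte_atomic err_count;  // via the type qualifier
 */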

/* The memory order is an enumerated type in C11. */
typedef memory_order rte_memory_order;

#define rte_memory_order_relaxed memory_order_relaxed
#ifdef __ATOMIC_RELAXED
static_assert(rte_memory_order_relaxed == __ATOMIC_RELAXED,
	"rte_memory_order_relaxed == __ATOMIC_RELAXED");
#endif

#define rte_memory_order_consume memory_order_consume
#ifdef __ATOMIC_CONSUME
static_assert(rte_memory_order_consume == __ATOMIC_CONSUME,
	"rte_memory_order_consume == __ATOMIC_CONSUME");
#endif

#define rte_memory_order_acquire memory_order_acquire
#ifdef __ATOMIC_ACQUIRE
static_assert(rte_memory_order_acquire == __ATOMIC_ACQUIRE,
	"rte_memory_order_acquire == __ATOMIC_ACQUIRE");
#endif

#define rte_memory_order_release memory_order_release
#ifdef __ATOMIC_RELEASE
static_assert(rte_memory_order_release == __ATOMIC_RELEASE,
	"rte_memory_order_release == __ATOMIC_RELEASE");
#endif

#define rte_memory_order_acq_rel memory_order_acq_rel
#ifdef __ATOMIC_ACQ_REL
static_assert(rte_memory_order_acq_rel == __ATOMIC_ACQ_REL,
	"rte_memory_order_acq_rel == __ATOMIC_ACQ_REL");
#endif

#define rte_memory_order_seq_cst memory_order_seq_cst
#ifdef __ATOMIC_SEQ_CST
static_assert(rte_memory_order_seq_cst == __ATOMIC_SEQ_CST,
	"rte_memory_order_seq_cst == __ATOMIC_SEQ_CST");
#endif

#define rte_atomic_load_explicit(ptr, memorder) \
	atomic_load_explicit(ptr, memorder)

#define rte_atomic_store_explicit(ptr, val, memorder) \
	atomic_store_explicit(ptr, val, memorder)

#define rte_atomic_exchange_explicit(ptr, val, memorder) \
	atomic_exchange_explicit(ptr, val, memorder)

#define rte_atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder)

#define rte_atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder)
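
/* Illustrative sketch (not part of this header): the weak variant may
 * fail spuriously, so it is normally retried in a loop; on failure the
 * observed value is written back into "expected". Names are
 * hypothetical.
 *
 *   static RTE_ATOMIC(uint32_t) counter;
 *
 *   uint32_t old = rte_atomic_load_explicit(&counter,
 *       rte_memory_order_relaxed);
 *   while (!rte_atomic_compare_exchange_weak_explicit(&counter,
 *           &old, old + 1,
 *           rte_memory_order_acq_rel, rte_memory_order_relaxed))
 *       ;   // "old" now holds the current value; retry with old + 1
 */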

#define rte_atomic_fetch_add_explicit(ptr, val, memorder) \
	atomic_fetch_add_explicit(ptr, val, memorder)

#define rte_atomic_fetch_sub_explicit(ptr, val, memorder) \
	atomic_fetch_sub_explicit(ptr, val, memorder)

#define rte_atomic_fetch_and_explicit(ptr, val, memorder) \
	atomic_fetch_and_explicit(ptr, val, memorder)

#define rte_atomic_fetch_xor_explicit(ptr, val, memorder) \
	atomic_fetch_xor_explicit(ptr, val, memorder)

#define rte_atomic_fetch_or_explicit(ptr, val, memorder) \
	atomic_fetch_or_explicit(ptr, val, memorder)

#define rte_atomic_fetch_nand_explicit(ptr, val, memorder) \
	atomic_fetch_nand_explicit(ptr, val, memorder)
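
/* Illustrative usage only (not part of this header): a relaxed-order
 * statistics counter, a common pattern when only atomicity, and no
 * ordering with other memory accesses, is required. The counter name
 * is hypothetical.
 *
 *   rte_atomic_fetch_add_explicit(&stats_drops, 1,
 *       rte_memory_order_relaxed);
 */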

#define rte_atomic_flag_test_and_set_explicit(ptr, memorder) \
	atomic_flag_test_and_set_explicit(ptr, memorder)

#define rte_atomic_flag_clear_explicit(ptr, memorder) \
	atomic_flag_clear_explicit(ptr, memorder)
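
/* Illustrative sketch (not part of this header): a minimal spin loop
 * built from the flag operations. Note the operand type differs by
 * mode: an atomic_flag with stdatomic enabled, a bool or char object
 * with the GCC built-ins. The "busy" flag is hypothetical.
 *
 *   while (rte_atomic_flag_test_and_set_explicit(&busy,
 *           rte_memory_order_acquire))
 *       ;   // spin until the previous value was clear
 *   // ... critical section ...
 *   rte_atomic_flag_clear_explicit(&busy, rte_memory_order_release);
 */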

/* An internal macro is provided here to allow conditional expansion
 * in the body of the per-arch rte_atomic_thread_fence inline functions.
 */
#define __rte_atomic_thread_fence(memorder) \
	atomic_thread_fence(memorder)
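
/* For illustration only: a generic per-arch wrapper built on the
 * internal macro might look like the sketch below; the actual
 * definitions live in the per-arch rte_atomic headers, not here.
 *
 *   static inline void
 *   rte_atomic_thread_fence(rte_memory_order memorder)
 *   {
 *           __rte_atomic_thread_fence(memorder);
 *   }
 */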

#else /* !RTE_ENABLE_STDATOMIC */

#define RTE_ATOMIC(type) type

#define __rte_atomic

/* The memory order is an integer type in GCC built-ins,
 * not an enumerated type like in C11.
 */
typedef int rte_memory_order;

#define rte_memory_order_relaxed __ATOMIC_RELAXED
#define rte_memory_order_consume __ATOMIC_CONSUME
#define rte_memory_order_acquire __ATOMIC_ACQUIRE
#define rte_memory_order_release __ATOMIC_RELEASE
#define rte_memory_order_acq_rel __ATOMIC_ACQ_REL
#define rte_memory_order_seq_cst __ATOMIC_SEQ_CST

#define rte_atomic_load_explicit(ptr, memorder) \
	__atomic_load_n(ptr, memorder)

#define rte_atomic_store_explicit(ptr, val, memorder) \
	__atomic_store_n(ptr, val, memorder)

#define rte_atomic_exchange_explicit(ptr, val, memorder) \
	__atomic_exchange_n(ptr, val, memorder)

#define rte_atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	__atomic_compare_exchange_n(ptr, expected, desired, 0, \
		succ_memorder, fail_memorder)

#define rte_atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	__atomic_compare_exchange_n(ptr, expected, desired, 1, \
		succ_memorder, fail_memorder)

#define rte_atomic_fetch_add_explicit(ptr, val, memorder) \
	__atomic_fetch_add(ptr, val, memorder)

#define rte_atomic_fetch_sub_explicit(ptr, val, memorder) \
	__atomic_fetch_sub(ptr, val, memorder)

#define rte_atomic_fetch_and_explicit(ptr, val, memorder) \
	__atomic_fetch_and(ptr, val, memorder)

#define rte_atomic_fetch_xor_explicit(ptr, val, memorder) \
	__atomic_fetch_xor(ptr, val, memorder)

#define rte_atomic_fetch_or_explicit(ptr, val, memorder) \
	__atomic_fetch_or(ptr, val, memorder)

#define rte_atomic_fetch_nand_explicit(ptr, val, memorder) \
	__atomic_fetch_nand(ptr, val, memorder)

#define rte_atomic_flag_test_and_set_explicit(ptr, memorder) \
	__atomic_test_and_set(ptr, memorder)

#define rte_atomic_flag_clear_explicit(ptr, memorder) \
	__atomic_clear(ptr, memorder)
/* An internal macro is provided here to allow conditional expansion
 * in the body of the per-arch rte_atomic_thread_fence inline functions.
 */
#define __rte_atomic_thread_fence(memorder) \
	__atomic_thread_fence(memorder)

#endif

#ifdef __cplusplus
}
#endif

#endif /* RTE_STDATOMIC_H */