/*
 * SPDX-License-Identifier: BSD-3-Clause
 * Inspired from FreeBSD src/sys/powerpc/include/atomic.h
 * Copyright (c) 2021 IBM Corporation
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 */

#ifndef _RTE_ATOMIC_PPC_64_H_
#define _RTE_ATOMIC_PPC_64_H_

#include <stdint.h>
#include "generic/rte_atomic.h"

#ifdef __cplusplus
extern "C" {
#endif
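
/*
 * All three general barriers below expand to the full "sync" (hwsync)
 * instruction. The lighter "lwsync" would suffice for the read-only and
 * write-only variants, but it does not order stores against later loads,
 * so the full "sync" is used for all of them here as the conservative
 * choice.
 */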

#define	rte_mb()  asm volatile("sync" : : : "memory")

#define	rte_wmb() asm volatile("sync" : : : "memory")

#define	rte_rmb() asm volatile("sync" : : : "memory")

#define rte_smp_mb() rte_mb()

#define rte_smp_wmb() rte_wmb()

#define rte_smp_rmb() rte_rmb()

#define rte_io_mb() rte_mb()

#define rte_io_wmb() rte_wmb()

#define rte_io_rmb() rte_rmb()
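
/*
 * The SMP- and IO-scoped barrier macros are straight aliases of the
 * general ones in this implementation; no weaker instruction is
 * substituted for either scope.
 */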

static __rte_always_inline void
rte_atomic_thread_fence(rte_memory_order memorder)
{
	__rte_atomic_thread_fence(memorder);
}
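
/*
 * rte_atomic_thread_fence() just forwards to __rte_atomic_thread_fence(),
 * which resolves to the toolchain's atomic thread fence (C11
 * atomic_thread_fence() or the __atomic_thread_fence() builtin,
 * depending on the build configuration).
 */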

/*------------------------- 16 bit atomic operations -------------------------*/
#ifndef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return rte_atomic_compare_exchange_strong_explicit(dst, &exp, src, rte_memory_order_acquire,
		rte_memory_order_acquire) ? 1 : 0;
}
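
/*
 * Usage sketch (illustrative only; "lock" is a hypothetical variable,
 * not part of DPDK): cmpset() returns 1 when *dst equalled exp and was
 * replaced by src, 0 otherwise, so it can back a trivial spinlock:
 *
 *	static volatile uint16_t lock;
 *
 *	while (!rte_atomic16_cmpset(&lock, 0, 1))
 *		rte_pause();
 */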

static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}

static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic_fetch_add_explicit(&v->cnt, 1, rte_memory_order_acquire);
}

static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic_fetch_sub_explicit(&v->cnt, 1, rte_memory_order_acquire);
}

static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_add_explicit(&v->cnt, 1, rte_memory_order_acquire) + 1 == 0;
}

static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_sub_explicit(&v->cnt, 1, rte_memory_order_acquire) - 1 == 0;
}
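
/*
 * inc_and_test()/dec_and_test() return 1 only when the new value is 0,
 * which suits reference counting: the one caller that sees 1 from
 * dec_and_test() owns the teardown. Sketch (illustrative only; "obj"
 * and "obj_free" are hypothetical):
 *
 *	if (rte_atomic16_dec_and_test(&obj->refcnt))
 *		obj_free(obj);
 */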

static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_2(dst, val, rte_memory_order_seq_cst);
}
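
/*
 * Note: __atomic_exchange_2 is the GCC/clang size-specific builtin for
 * a 2-byte atomic swap. The rte_memory_order_* constants match the
 * compiler's __ATOMIC_* values on these compilers, so they can be
 * passed to the builtin directly; the 4- and 8-byte exchanges below use
 * the matching builtins.
 */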

/*------------------------- 32 bit atomic operations -------------------------*/

static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return rte_atomic_compare_exchange_strong_explicit(dst, &exp, src, rte_memory_order_acquire,
		rte_memory_order_acquire) ? 1 : 0;
}

static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}

static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic_fetch_add_explicit(&v->cnt, 1, rte_memory_order_acquire);
}

static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic_fetch_sub_explicit(&v->cnt, 1, rte_memory_order_acquire);
}

static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_add_explicit(&v->cnt, 1, rte_memory_order_acquire) + 1 == 0;
}

static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_sub_explicit(&v->cnt, 1, rte_memory_order_acquire) - 1 == 0;
}

static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_4(dst, val, rte_memory_order_seq_cst);
}

/*------------------------- 64 bit atomic operations -------------------------*/

static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return rte_atomic_compare_exchange_strong_explicit(dst, &exp, src, rte_memory_order_acquire,
		rte_memory_order_acquire) ? 1 : 0;
}

static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
	v->cnt = 0;
}

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
	v->cnt = new_value;
}
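
/*
 * init/read/set (and clear below) intentionally use plain accesses:
 * naturally aligned 64-bit loads and stores are single-copy atomic on
 * 64-bit POWER, so no barrier or builtin is needed for unordered
 * access.
 */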

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	rte_atomic_fetch_add_explicit(&v->cnt, inc, rte_memory_order_acquire);
}

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	rte_atomic_fetch_sub_explicit(&v->cnt, dec, rte_memory_order_acquire);
}

static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic_fetch_add_explicit(&v->cnt, 1, rte_memory_order_acquire);
}

static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic_fetch_sub_explicit(&v->cnt, 1, rte_memory_order_acquire);
}

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return rte_atomic_fetch_add_explicit(&v->cnt, inc, rte_memory_order_acquire) + inc;
}

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return rte_atomic_fetch_sub_explicit(&v->cnt, dec, rte_memory_order_acquire) - dec;
}
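
/*
 * The *_return helpers compute the post-operation value from the
 * fetched old value (old + inc / old - dec) instead of re-reading the
 * counter, so the result is exactly the value this call produced.
 */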

static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic_fetch_add_explicit(&v->cnt, 1, rte_memory_order_acquire) + 1 == 0;
}

static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic_fetch_sub_explicit(&v->cnt, 1, rte_memory_order_acquire) - 1 == 0;
}

static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}

static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	v->cnt = 0;
}

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_8(dst, val, rte_memory_order_seq_cst);
}

#endif /* !RTE_FORCE_INTRINSICS */

#ifdef __cplusplus
}
#endif

#endif /* _RTE_ATOMIC_PPC_64_H_ */