/*	$OpenBSD: atomic.h,v 1.19 2022/08/29 02:01:18 jsg Exp $	*/

/* Public Domain */

#ifndef _ARM_ATOMIC_H_
#define _ARM_ATOMIC_H_

/*
 * Compare and set:
 * ret = *ptr
 * if (ret == expect)
 *	*ptr = new
 * return (ret)
 */
#define _def_atomic_cas(_f, _t)					\
static inline _t						\
_f(volatile _t *p, _t e, _t n)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1: ldrex %0, [%4]		\n\t"			\
	    "   cmp %0, %3		\n\t"			\
	    "   bne 2f			\n\t"			\
	    "   strex %1, %2, [%4]	\n\t"			\
	    "   cmp %1, #0		\n\t"			\
	    "   bne 1b			\n\t"			\
	    "   b 3f			\n\t"			\
	    "2: clrex			\n\t"			\
	    "3:				\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (n), "r" (e), "r" (p)				\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_cas(_atomic_cas_uint, unsigned int)
_def_atomic_cas(_atomic_cas_ulong, unsigned long)
#undef _def_atomic_cas

#define atomic_cas_uint(_p, _e, _n)	_atomic_cas_uint((_p), (_e), (_n))
#define atomic_cas_ulong(_p, _e, _n)	_atomic_cas_ulong((_p), (_e), (_n))

static inline void *
_atomic_cas_ptr(volatile void *p, void *e, void *n)
{
	void *ret;
	unsigned long modified;

	__asm volatile (
	    "1: ldrex %0, [%4]		\n\t"
	    "   cmp %0, %3		\n\t"
	    "   bne 2f			\n\t"
	    "   strex %1, %2, [%4]	\n\t"
	    "   cmp %1, #0		\n\t"
	    "   bne 1b			\n\t"
	    "   b 3f			\n\t"
	    "2: clrex			\n\t"
	    "3:				\n\t"
	    : "=&r" (ret), "=&r" (modified)
	    : "r" (n), "r" (e), "r" (p)
	    : "memory", "cc"
	);
	return (ret);
}
#define atomic_cas_ptr(_p, _e, _n)	_atomic_cas_ptr((_p), (_e), (_n))
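
/*
 * Usage sketch (illustrative only, not part of the original header;
 * "lck" is a hypothetical lock word):
 *
 *	volatile unsigned int lck = 0;
 *
 *	if (atomic_cas_uint(&lck, 0, 1) == 0) {
 *		we observed 0 and stored 1
 *	}
 *
 * The old value is returned, so the store took place iff the return
 * value equals the expected value (0 here).
 */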

/*
 * Swap:
 * ret = *p
 * *p = val
 * return (ret)
 */
#define _def_atomic_swap(_f, _t)				\
static inline _t						\
_f(volatile _t *p, _t v)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1: ldrex %0, [%3]		\n\t"			\
	    "   strex %1, %2, [%3]	\n\t"			\
	    "   cmp %1, #0		\n\t"			\
	    "   bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (v), "r" (p)					\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_swap(_atomic_swap_uint, unsigned int)
_def_atomic_swap(_atomic_swap_ulong, unsigned long)
#undef _def_atomic_swap

#define atomic_swap_uint(_p, _v)	_atomic_swap_uint((_p), (_v))
#define atomic_swap_ulong(_p, _v)	_atomic_swap_ulong((_p), (_v))

static inline void *
_atomic_swap_ptr(volatile void *p, void *v)
{
	void *ret;
	unsigned long modified;

	__asm volatile (
	    "1: ldrex %0, [%3]		\n\t"
	    "   strex %1, %2, [%3]	\n\t"
	    "   cmp %1, #0		\n\t"
	    "   bne 1b			\n\t"
	    : "=&r" (ret), "=&r" (modified)
	    : "r" (v), "r" (p)
	    : "memory", "cc"
	);
	return (ret);
}
#define atomic_swap_ptr(_p, _v)		_atomic_swap_ptr((_p), (_v))
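
/*
 * Usage sketch (illustrative only; "listhead" and "newhead" are
 * hypothetical pointers):
 *
 *	void *oldhead = atomic_swap_ptr(&listhead, newhead);
 *
 * The previous contents of the location are returned unconditionally.
 */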

/*
 * Increment returning the new value
 * *p += 1
 * return (*p)
 */
#define _def_atomic_inc_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p)						\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1: ldrex %0, [%2]		\n\t"			\
	    "   add %0, %0, #1		\n\t"			\
	    "   strex %1, %0, [%2]	\n\t"			\
	    "   cmp %1, #0		\n\t"			\
	    "   bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p)						\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_inc_nv(_atomic_inc_int_nv, unsigned int)
_def_atomic_inc_nv(_atomic_inc_long_nv, unsigned long)
#undef _def_atomic_inc_nv

#define atomic_inc_int_nv(_p)		_atomic_inc_int_nv((_p))
#define atomic_inc_long_nv(_p)		_atomic_inc_long_nv((_p))
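
/*
 * Usage sketch (illustrative only; "refcnt" is a hypothetical counter):
 *
 *	if (atomic_inc_int_nv(&refcnt) == 1)
 *		this was the first reference
 */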

/*
 * Decrement returning the new value
 * *p -= 1
 * return (*p)
 */
#define _def_atomic_dec_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p)						\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1: ldrex %0, [%2]		\n\t"			\
	    "   sub %0, %0, #1		\n\t"			\
	    "   strex %1, %0, [%2]	\n\t"			\
	    "   cmp %1, #0		\n\t"			\
	    "   bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p)						\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_dec_nv(_atomic_dec_int_nv, unsigned int)
_def_atomic_dec_nv(_atomic_dec_long_nv, unsigned long)
#undef _def_atomic_dec_nv

#define atomic_dec_int_nv(_p)		_atomic_dec_int_nv((_p))
#define atomic_dec_long_nv(_p)		_atomic_dec_long_nv((_p))
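
/*
 * Usage sketch (illustrative only; "refcnt" is a hypothetical counter):
 *
 *	if (atomic_dec_int_nv(&refcnt) == 0)
 *		the last reference is gone
 */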

/*
 * Addition returning the new value
 * *p += v
 * return (*p)
 */
#define _def_atomic_add_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p, _t v)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1: ldrex %0, [%2]		\n\t"			\
	    "   add %0, %0, %3		\n\t"			\
	    "   strex %1, %0, [%2]	\n\t"			\
	    "   cmp %1, #0		\n\t"			\
	    "   bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p), "r" (v)					\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_add_nv(_atomic_add_int_nv, unsigned int)
_def_atomic_add_nv(_atomic_add_long_nv, unsigned long)
#undef _def_atomic_add_nv

#define atomic_add_int_nv(_p, _v)	_atomic_add_int_nv((_p), (_v))
#define atomic_add_long_nv(_p, _v)	_atomic_add_long_nv((_p), (_v))
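
/*
 * Usage sketch (illustrative only; "bytes" and "len" are hypothetical):
 *
 *	unsigned long total = atomic_add_long_nv(&bytes, len);
 *
 * The returned value already includes the addition.
 */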

/*
 * Subtraction returning the new value
 * *p -= v
 * return (*p)
 */
#define _def_atomic_sub_nv(_f, _t)				\
static inline _t						\
_f(volatile _t *p, _t v)					\
{								\
	_t ret, modified;					\
								\
	__asm volatile (					\
	    "1: ldrex %0, [%2]		\n\t"			\
	    "   sub %0, %0, %3		\n\t"			\
	    "   strex %1, %0, [%2]	\n\t"			\
	    "   cmp %1, #0		\n\t"			\
	    "   bne 1b			\n\t"			\
	    : "=&r" (ret), "=&r" (modified)			\
	    : "r" (p), "r" (v)					\
	    : "memory", "cc"					\
	);							\
	return (ret);						\
}
_def_atomic_sub_nv(_atomic_sub_int_nv, unsigned int)
_def_atomic_sub_nv(_atomic_sub_long_nv, unsigned long)
#undef _def_atomic_sub_nv

#define atomic_sub_int_nv(_p, _v)	_atomic_sub_int_nv((_p), (_v))
#define atomic_sub_long_nv(_p, _v)	_atomic_sub_long_nv((_p), (_v))
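
/*
 * Usage sketch (illustrative only; "inflight" and "n" are hypothetical):
 *
 *	if (atomic_sub_int_nv(&inflight, n) == 0)
 *		no requests remain outstanding
 */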

#define __membar(_f) do { __asm volatile(_f ::: "memory"); } while (0)

#define membar_enter()		__membar("dmb sy")
#define membar_exit()		__membar("dmb sy")
#define membar_producer()	__membar("dmb st")
#define membar_consumer()	__membar("dmb sy")
#define membar_sync()		__membar("dmb sy")
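
/*
 * Usage sketch (illustrative only; "data" and "ready" are hypothetical):
 * a producer orders its data store before the flag store, a consumer
 * orders the flag load before its data load.
 *
 *	producer:
 *		data = v;
 *		membar_producer();
 *		ready = 1;
 *
 *	consumer:
 *		while (ready == 0)
 *			;
 *		membar_consumer();
 *		use data;
 */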

#if defined(_KERNEL)

/* virtio needs MP membars even on SP kernels */
#define virtio_membar_producer()	__membar("dmb st")
#define virtio_membar_consumer()	__membar("dmb sy")
#define virtio_membar_sync()		__membar("dmb sy")
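
/*
 * Usage sketch (illustrative only; the descriptor and index below stand
 * in for virtqueue fields): descriptor writes must be visible to the
 * device before the index that publishes them is updated.
 *
 *	fill in the descriptor;
 *	virtio_membar_producer();
 *	publish the new available index;
 */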

/*
 * Set bits
 * *p = *p | v
 */
static inline void
atomic_setbits_int(volatile unsigned int *p, unsigned int v)
{
	unsigned int modified, tmp;

	__asm volatile (
	    "1: ldrex %0, [%3]		\n\t"
	    "   orr %0, %0, %2		\n\t"
	    "   strex %1, %0, [%3]	\n\t"
	    "   cmp %1, #0		\n\t"
	    "   bne 1b			\n\t"
	    : "=&r" (tmp), "=&r" (modified)
	    : "r" (v), "r" (p)
	    : "memory", "cc"
	);
}
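
/*
 * Usage sketch (illustrative only; "flags" and F_EXAMPLE are hypothetical):
 *
 *	atomic_setbits_int(&flags, F_EXAMPLE);
 */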

/*
 * Clear bits
 * *p = *p & (~v)
 */
static inline void
atomic_clearbits_int(volatile unsigned int *p, unsigned int v)
{
	unsigned int modified, tmp;

	__asm volatile (
	    "1: ldrex %0, [%3]		\n\t"
	    "   bic %0, %0, %2		\n\t"
	    "   strex %1, %0, [%3]	\n\t"
	    "   cmp %1, #0		\n\t"
	    "   bne 1b			\n\t"
	    : "=&r" (tmp), "=&r" (modified)
	    : "r" (v), "r" (p)
	    : "memory", "cc"
	);
}
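
/*
 * Usage sketch (illustrative only; "flags" and F_EXAMPLE are hypothetical):
 *
 *	atomic_clearbits_int(&flags, F_EXAMPLE);
 */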

#endif /* defined(_KERNEL) */
#endif /* _ARM_ATOMIC_H_ */