/*	$OpenBSD: atomic.h,v 1.15 2017/07/04 09:00:12 mpi Exp $	*/
/*
 * Copyright (c) 2007 Artur Grabowski <art@openbsd.org>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

/*
 * Atomic 32-bit compare-and-swap.
 *
 * If *p equals the expected value e, store n into *p.  Returns the
 * previous contents of *p; the operation succeeded iff the return
 * value equals e.
 */
static inline unsigned int
_atomic_cas_uint(volatile unsigned int *p, unsigned int e, unsigned int n)
{
	/*
	 * SPARCv9 "cas": compares the word at [%2] with %3; %0 receives
	 * the old memory value, and on a match its input (n) is stored.
	 * The "m" operands tell the compiler *p is both read and written.
	 */
	__asm volatile("cas [%2], %3, %0"
	    : "+r" (n), "=m" (*p)
	    : "r" (p), "r" (e), "m" (*p));

	return (n);
}
#define atomic_cas_uint(_p, _e, _n) _atomic_cas_uint((_p), (_e), (_n))
31
/*
 * Atomic 64-bit (long) compare-and-swap.
 *
 * If *p equals the expected value e, store n into *p.  Returns the
 * previous contents of *p; the operation succeeded iff the return
 * value equals e.
 */
static inline unsigned long
_atomic_cas_ulong(volatile unsigned long *p, unsigned long e, unsigned long n)
{
	/* "casx" is the 64-bit form of the SPARCv9 compare-and-swap. */
	__asm volatile("casx [%2], %3, %0"
	    : "+r" (n), "=m" (*p)
	    : "r" (p), "r" (e), "m" (*p));

	return (n);
}
#define atomic_cas_ulong(_p, _e, _n) _atomic_cas_ulong((_p), (_e), (_n))
42
/*
 * Atomic pointer-sized compare-and-swap.
 *
 * If *p equals the expected pointer e, store n into *p.  Returns the
 * previous contents of *p; the operation succeeded iff the return
 * value equals e.  Pointers are 64-bit on sparc64, hence "casx".
 */
static inline void *
_atomic_cas_ptr(volatile void *p, void *e, void *n)
{
	/* Cast through unsigned long so the memory operands have a size. */
	__asm volatile("casx [%2], %3, %0"
	    : "+r" (n), "=m" (*(volatile unsigned long *)p)
	    : "r" (p), "r" (e), "m" (*(volatile unsigned long *)p));

	return (n);
}
#define atomic_cas_ptr(_p, _e, _n) _atomic_cas_ptr((_p), (_e), (_n))
53
/*
 * Generate an atomic swap function named _f, operating on type _t and
 * built as a retry loop around the compare-and-swap primitive _c:
 * keep attempting to replace *p with v until the CAS succeeds, then
 * return the value that was previously stored at *p.
 */
#define _def_atomic_swap(_f, _t, _c)					\
static inline _t							\
_f(volatile _t *p, _t v)						\
{									\
	_t e;								\
	_t r;								\
									\
	r = (_t)*p;							\
	do {								\
		e = r;							\
		r = _c(p, e, v);					\
	} while (r != e);						\
									\
	return (r);							\
}

_def_atomic_swap(_atomic_swap_uint, unsigned int, atomic_cas_uint)
_def_atomic_swap(_atomic_swap_ulong, unsigned long, atomic_cas_ulong)
#undef _def_atomic_swap
73
/*
 * Atomically replace the pointer stored at *p with v, returning the
 * pointer that was previously stored there.  Implemented as a CAS
 * retry loop, like the integer swaps above.
 */
static inline void *
_atomic_swap_ptr(volatile void *p, void *v)
{
	void *e, *r;

	r = *(void **)p;
	do {
		e = r;
		r = atomic_cas_ptr(p, e, v);
	} while (r != e);

	return (r);
}

#define atomic_swap_uint(_p, _v) _atomic_swap_uint(_p, _v)
#define atomic_swap_ulong(_p, _v) _atomic_swap_ulong(_p, _v)
#define atomic_swap_ptr(_p, _v) _atomic_swap_ptr(_p, _v)
91
/*
 * Generate an atomic read-modify-write function named _f, operating on
 * type _t: apply the binary operator _op with operand v to *p via a
 * retry loop around the compare-and-swap primitive _c, and return the
 * NEW value ("_nv") stored at *p.
 */
#define _def_atomic_op_nv(_f, _t, _c, _op)				\
static inline _t							\
_f(volatile _t *p, _t v)						\
{									\
	_t e, r, f;							\
									\
	r = *p;								\
	do {								\
		e = r;							\
		f = e _op v;						\
		r = _c(p, e, f);					\
	} while (r != e);						\
									\
	return (f);							\
}

_def_atomic_op_nv(_atomic_add_int_nv, unsigned int, atomic_cas_uint, +)
_def_atomic_op_nv(_atomic_add_long_nv, unsigned long, atomic_cas_ulong, +)
_def_atomic_op_nv(_atomic_sub_int_nv, unsigned int, atomic_cas_uint, -)
_def_atomic_op_nv(_atomic_sub_long_nv, unsigned long, atomic_cas_ulong, -)
#undef _def_atomic_op_nv

#define atomic_add_int_nv(_p, _v) _atomic_add_int_nv(_p, _v)
#define atomic_add_long_nv(_p, _v) _atomic_add_long_nv(_p, _v)
#define atomic_sub_int_nv(_p, _v) _atomic_sub_int_nv(_p, _v)
#define atomic_sub_long_nv(_p, _v) _atomic_sub_long_nv(_p, _v)
118
/*
 * Memory barriers, built on the SPARCv9 "membar" instruction.  The
 * argument selects which ordering constraints to enforce; the
 * "memory" clobber also stops the compiler from reordering accesses
 * across the barrier.
 */
#define __membar(_m) __asm volatile("membar " _m ::: "memory")

#define membar_enter() __membar("#StoreLoad|#StoreStore")
#define membar_exit() __membar("#LoadStore|#StoreStore")
#define membar_producer() __membar("#StoreStore")
#define membar_consumer() __membar("#LoadLoad")
#define membar_sync() __membar("#Sync")
126
127 #if defined(_KERNEL)
128
129 static __inline void
atomic_setbits_int(volatile unsigned int * uip,unsigned int v)130 atomic_setbits_int(volatile unsigned int *uip, unsigned int v)
131 {
132 unsigned int e, r;
133
134 r = *uip;
135 do {
136 e = r;
137 r = atomic_cas_uint(uip, e, e | v);
138 } while (r != e);
139 }
140
141 static __inline void
atomic_clearbits_int(volatile unsigned int * uip,unsigned int v)142 atomic_clearbits_int(volatile unsigned int *uip, unsigned int v)
143 {
144 unsigned int e, r;
145
146 r = *uip;
147 do {
148 e = r;
149 r = atomic_cas_uint(uip, e, e & ~v);
150 } while (r != e);
151 }
152
153 #endif /* defined(_KERNEL) */
#endif /* _MACHINE_ATOMIC_H_ */