/*	$OpenBSD: atomic.h,v 1.13 2014/07/18 12:44:53 dlg Exp $	*/
/*
 * Copyright (c) 2007 Artur Grabowski <art@openbsd.org>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

/*
 * Kernel atomic operations built on the SPARC compare-and-swap
 * instructions: cas for 32-bit values, casx for register-width values.
 * Every read-modify-write primitive below is a CAS retry loop; none of
 * them imply a memory barrier by themselves -- use the membar_*()
 * macros at the bottom of this file for ordering.
 */

#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#if defined(_KERNEL)

/*
 * Atomic compare-and-swap: if *p == e, store n into *p.  Always returns
 * the value *p held before the operation, so callers detect success by
 * comparing the result against e.
 *
 * The cas instruction swaps the previous memory contents into the
 * register holding n, which is why n is a read/write ("+r") operand and
 * doubles as the return value.  The "=m"/"m" operand pair tells the
 * compiler that *p is both read and written by the asm, avoiding a full
 * "memory" clobber.
 */
static inline unsigned int
_atomic_cas_uint(volatile unsigned int *p, unsigned int e, unsigned int n)
{
	__asm volatile("cas [%2], %3, %0"
	    : "+r" (n), "=m" (*p)
	    : "r" (p), "r" (e), "m" (*p));

	return (n);
}
#define atomic_cas_uint(_p, _e, _n) _atomic_cas_uint((_p), (_e), (_n))

/*
 * Long-sized variant of _atomic_cas_uint(), using the extended
 * (register-width) casx instruction.  Same operand conventions.
 */
static inline unsigned long
_atomic_cas_ulong(volatile unsigned long *p, unsigned long e, unsigned long n)
{
	__asm volatile("casx [%2], %3, %0"
	    : "+r" (n), "=m" (*p)
	    : "r" (p), "r" (e), "m" (*p));

	return (n);
}
#define atomic_cas_ulong(_p, _e, _n) _atomic_cas_ulong((_p), (_e), (_n))

/*
 * Pointer variant: same casx operation, with the memory operand viewed
 * through a volatile unsigned long * cast (assumes pointers and longs
 * have the same width -- TODO confirm, true on LP64 targets).
 */
static inline void *
_atomic_cas_ptr(volatile void *p, void *e, void *n)
{
	__asm volatile("casx [%2], %3, %0"
	    : "+r" (n), "=m" (*(volatile unsigned long *)p)
	    : "r" (p), "r" (e), "m" (*(volatile unsigned long *)p));

	return (n);
}
#define atomic_cas_ptr(_p, _e, _n) _atomic_cas_ptr((_p), (_e), (_n))

/*
 * Generate an atomic swap (unconditional exchange) function _f of type
 * _t on top of CAS primitive _c: re-read the current value and retry
 * the CAS until it succeeds.  Returns the previous contents of *p.
 */
#define def_atomic_swap(_f, _t, _c)					\
static inline _t							\
_f(volatile _t *p, _t v)						\
{									\
	_t e;								\
	_t r;								\
									\
	r = (_t)*p;							\
	do {								\
		e = r;							\
		r = _c(p, e, v);					\
	} while (r != e);						\
									\
	return (r);							\
}

def_atomic_swap(_atomic_swap_uint, unsigned int, atomic_cas_uint)
def_atomic_swap(_atomic_swap_ulong, unsigned long, atomic_cas_ulong)
#undef def_atomic_swap

/*
 * Pointer swap, written out by hand (rather than via the macro above)
 * because of the void * casts.  Same CAS-loop shape; returns the
 * previous contents of *p.
 */
static inline void *
_atomic_swap_ptr(volatile void *p, void *v)
{
	void *e, *r;

	r = *(void **)p;
	do {
		e = r;
		r = atomic_cas_ptr(p, e, v);
	} while (r != e);

	return (r);
}

#define atomic_swap_uint(_p, _v) _atomic_swap_uint(_p, _v)
#define atomic_swap_ulong(_p, _v) _atomic_swap_ulong(_p, _v)
#define atomic_swap_ptr(_p, _v) _atomic_swap_ptr(_p, _v)

/*
 * Generate an atomic arithmetic function _f of type _t on top of CAS
 * primitive _c: apply binary operator _op to the current value and v
 * in a CAS retry loop.  Returns the NEW value (hence the _nv suffix),
 * i.e. the result of the operation, not the previous contents.
 */
#define def_atomic_op_nv(_f, _t, _c, _op)				\
static inline _t							\
_f(volatile _t *p, _t v)						\
{									\
	_t e, r, f;							\
									\
	r = *p;								\
	do {								\
		e = r;							\
		f = e _op v;						\
		r = _c(p, e, f);					\
	} while (r != e);						\
									\
	return (f);							\
}

def_atomic_op_nv(_atomic_add_int_nv, unsigned int, atomic_cas_uint, +)
def_atomic_op_nv(_atomic_add_long_nv, unsigned long, atomic_cas_ulong, +)
def_atomic_op_nv(_atomic_sub_int_nv, unsigned int, atomic_cas_uint, -)
def_atomic_op_nv(_atomic_sub_long_nv, unsigned long, atomic_cas_ulong, -)
#undef def_atomic_op_nv

#define atomic_add_int_nv(_p, _v) _atomic_add_int_nv(_p, _v)
#define atomic_add_long_nv(_p, _v) _atomic_add_long_nv(_p, _v)
#define atomic_sub_int_nv(_p, _v) _atomic_sub_int_nv(_p, _v)
#define atomic_sub_long_nv(_p, _v) _atomic_sub_long_nv(_p, _v)

/* Atomically set (OR in) the bits of v in *uip; no return value. */
static __inline void
atomic_setbits_int(volatile unsigned int *uip, unsigned int v)
{
	unsigned int e, r;

	r = *uip;
	do {
		e = r;
		r = atomic_cas_uint(uip, e, e | v);
	} while (r != e);
}

/* Atomically clear (AND out) the bits of v in *uip; no return value. */
static __inline void
atomic_clearbits_int(volatile unsigned int *uip, unsigned int v)
{
	unsigned int e, r;

	r = *uip;
	do {
		e = r;
		r = atomic_cas_uint(uip, e, e & ~v);
	} while (r != e);
}

/*
 * Memory barriers, mapped onto the SPARC membar instruction's ordering
 * flags.  NOTE(review): membar() itself and the StoreLoad/StoreStore/
 * LoadStore/LoadLoad/Sync constants are defined elsewhere, not visible
 * in this header.
 */
#define membar_enter()		membar(StoreLoad|StoreStore)
#define membar_exit()		membar(LoadStore|StoreStore)
#define membar_producer()	membar(StoreStore)
#define membar_consumer()	membar(LoadLoad)
#define membar_sync()		membar(Sync)

#endif /* defined(_KERNEL) */
#endif /* _MACHINE_ATOMIC_H_ */