/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2015 François Tigeot
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _ASM_ATOMIC_H_
#define _ASM_ATOMIC_H_

#include <sys/types.h>
#include <machine/atomic.h>
#include <linux/compiler.h>

typedef struct {
	volatile u_int counter;
} atomic_t;

typedef struct {
	volatile u_long counter;
} atomic64_t;

#define atomic_add(i, v)		atomic_add_return((i), (v))
#define atomic_sub(i, v)		atomic_sub_return((i), (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic_dec_return(v)		atomic_sub_return(1, (v))

#define atomic_xchg(p, v)		atomic_swap_int(&((p)->counter), v)
#define atomic64_xchg(p, v)		atomic_swap_long(&((p)->counter), v)

#define atomic_cmpset(p, o, n)		atomic_cmpset_32(&((p)->counter), o, n)

static inline int
atomic_add_return(int i, atomic_t *v)
{
	return i + atomic_fetchadd_int(&v->counter, i);
}

static inline int
atomic_sub_return(int i, atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -i) - i;
}

static inline void
atomic_set(atomic_t *v, int i)
{
	atomic_store_rel_int(&v->counter, i);
}

static inline void
atomic64_set(atomic64_t *v, long i)
{
	atomic_store_rel_long(&v->counter, i);
}

static inline int
atomic_read(atomic_t *v)
{
	return atomic_load_acq_int(&v->counter);
}

static inline int64_t
atomic64_read(atomic64_t *v)
{
	return atomic_load_acq_long(&v->counter);
}

static inline int
atomic_inc(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, 1) + 1;
}

static inline int
atomic_dec(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -1) - 1;
}
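
/*
 * The wrappers above let Linux-derived driver code use atomic_t on top of
 * the native machine/atomic.h primitives.  A minimal usage sketch (the
 * object and its release function are hypothetical, not part of this
 * header):
 *
 *	atomic_t refs;
 *
 *	atomic_set(&refs, 1);			initial reference
 *	atomic_inc(&refs);			take another reference
 *	if (atomic_dec_and_test(&refs))		drop one; true on 1 -> 0
 *		release_object();		hypothetical destructor
 */
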
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		/*
		 * Linux: old = atomic_cmpxchg((v), c, c + (a));
		 *
		 * The native atomic_cmpset_int() returns non-zero on
		 * success rather than the previous value, so re-read the
		 * counter and retry when the compare-and-set fails.
		 */
		if (likely(atomic_cmpset_int(&v->counter, c, c + (a))))
			break;
		c = atomic_read(v);
	}
	return (c != (u));
}

#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)

/* atomic_clear_mask: atomically clear the bits set in mask in *addr */
#define atomic_clear_mask(mask, addr)		\
	/* atomic *addr &= ~mask; */		\
	__asm __volatile("lock andl %1, %0"	\
	: "+m" (*(addr))			\
	: "r" (~(mask))				\
	: "memory");

/* atomic_set_mask: atomically set the bits set in mask in *addr */
#define atomic_set_mask(mask, addr)		\
	/* atomic *addr |= mask; */		\
	__asm __volatile("lock orl %1, %0"	\
	: "+m" (*(addr))			\
	: "r" ((mask))				\
	: "memory");

#define smp_mb__before_atomic_inc()	cpu_ccfence()
#define smp_mb__after_atomic_inc()	cpu_ccfence()

#endif /* _ASM_ATOMIC_H_ */
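
/*
 * Usage sketch for the mask macros above (illustrative only; the flags
 * variable is hypothetical).  The "l"-suffixed andl/orl instructions make
 * these macros valid for 32-bit quantities only:
 *
 *	volatile u_int flags = 0;
 *
 *	atomic_set_mask(0x1, &flags);		flags |= 0x1, atomically
 *	atomic_clear_mask(0x1, &flags);		flags &= ~0x1, atomically
 */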