/* Linux-specific atomic operations for ARM EABI.
   Copyright (C) 2008-2019 Free Software Foundation, Inc.
   Contributed by CodeSourcery.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

/* Kernel helper for compare-and-exchange.  The Linux kernel maps a
   "user helper" page at a fixed address in every process; the routine
   at 0xffff0fc0 atomically replaces *PTR with NEWVAL if *PTR equals
   OLDVAL.  As used throughout this file, a zero return means the
   exchange succeeded and non-zero means it did not.  */
typedef int (__kernel_cmpxchg_t) (int oldval, int newval, int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *) 0xffff0fc0)

/* Kernel helper for memory barrier, at another fixed address in the
   same helper page.  */
typedef void (__kernel_dmb_t) (void);
#define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)

/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers; see linux-atomic-64bit.c for "long long" (64-bit)
   operations.  */

/* All of these entry points are internal to libgcc.  */
#define HIDDEN __attribute__ ((visibility ("hidden")))

/* XORed into the bit-shift computed from a sub-word's byte offset so
   that the shift addresses the correct lane for the target's byte
   order: on little-endian (__ARMEL__) the offset maps directly, on
   big-endian the lane order within a word is reversed.  */
#ifdef __ARMEL__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

/* Lane masks for 1- and 2-byte quantities (before shifting).  */
#define MASK_1 0xffu
#define MASK_2 0xffffu

/* Define a 32-bit __sync_fetch_and_<OP>: load the current value,
   compute PFX_OP (value INF_OP val), and retry the kernel cmpxchg
   until no other writer has intervened.  Returns the value *PTR held
   before the update.  */
#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP)				\
  int HIDDEN								\
  __sync_fetch_and_##OP##_4 (int *ptr, int val)				\
  {									\
    int failure, tmp;							\
									\
    do {								\
      tmp = *ptr;							\
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);	\
    } while (failure != 0);						\
									\
    return tmp;								\
  }

FETCH_AND_OP_WORD (add, , +)
FETCH_AND_OP_WORD (sub, , -)
FETCH_AND_OP_WORD (or, , |)
FETCH_AND_OP_WORD (and, , &)
FETCH_AND_OP_WORD (xor, , ^)
FETCH_AND_OP_WORD (nand, ~, &)

/* Name selectors used by SUBWORD_SYNC_OP below: the "oldval" spelling
   produces __sync_fetch_and_<op>, the "newval" spelling produces
   __sync_<op>_and_fetch.  */
#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH

/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */

/* Subword atomics are emulated on the aligned 32-bit word containing
   the operand: the operation is applied to the selected byte/halfword
   lane only, the other lanes are carried over unchanged, and the whole
   word is installed with the kernel cmpxchg, retrying on contention.
   RETURN is either "oldval" or "newval"; it selects both the emitted
   function name (via NAME_*) and whether the pre- or post-operation
   lane value is returned.  */
#define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN)	\
  TYPE HIDDEN								\
  NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val)			\
  {									\
    /* Address of the aligned word containing *PTR.  */			\
    int *wordptr = (int *) ((unsigned int) ptr & ~3);			\
    unsigned int mask, shift, oldval, newval;				\
    int failure;							\
									\
    /* Bit position of the operand's lane, adjusted for byte order.  */	\
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;	\
    mask = MASK_##WIDTH << shift;					\
									\
    do {								\
      oldval = *wordptr;						\
      /* Apply the operation to the extracted lane, then re-insert.  */	\
      newval = ((PFX_OP (((oldval & mask) >> shift)			\
                         INF_OP (unsigned int) val)) << shift) & mask;	\
      newval |= oldval & ~mask;						\
      failure = __kernel_cmpxchg (oldval, newval, wordptr);		\
    } while (failure != 0);						\
									\
    return (RETURN & mask) >> shift;					\
  }

SUBWORD_SYNC_OP (add, , +, short, 2, oldval)
SUBWORD_SYNC_OP (sub, , -, short, 2, oldval)
SUBWORD_SYNC_OP (or, , |, short, 2, oldval)
SUBWORD_SYNC_OP (and, , &, short, 2, oldval)
SUBWORD_SYNC_OP (xor, , ^, short, 2, oldval)
SUBWORD_SYNC_OP (nand, ~, &, short, 2, oldval)

SUBWORD_SYNC_OP (add, , +, signed char, 1, oldval)
SUBWORD_SYNC_OP (sub, , -, signed char, 1, oldval)
SUBWORD_SYNC_OP (or, , |, signed char, 1, oldval)
SUBWORD_SYNC_OP (and, , &, signed char, 1, oldval)
SUBWORD_SYNC_OP (xor, , ^, signed char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, signed char, 1, oldval)

/* Like FETCH_AND_OP_WORD, but returns the value *PTR holds after the
   update (recomputed from the value that the successful cmpxchg
   actually installed).  */
#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP)				\
  int HIDDEN								\
  __sync_##OP##_and_fetch_4 (int *ptr, int val)				\
  {									\
    int tmp, failure;							\
									\
    do {								\
      tmp = *ptr;							\
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);	\
    } while (failure != 0);						\
									\
    return PFX_OP (tmp INF_OP val);					\
  }

OP_AND_FETCH_WORD (add, , +)
OP_AND_FETCH_WORD (sub, , -)
OP_AND_FETCH_WORD (or, , |)
OP_AND_FETCH_WORD (and, , &)
OP_AND_FETCH_WORD (xor, , ^)
OP_AND_FETCH_WORD (nand, ~, &)

SUBWORD_SYNC_OP (add, , +, short, 2, newval)
SUBWORD_SYNC_OP (sub, , -, short, 2, newval)
SUBWORD_SYNC_OP (or, , |, short, 2, newval)
SUBWORD_SYNC_OP (and, , &, short, 2, newval)
SUBWORD_SYNC_OP (xor, , ^, short, 2, newval)
SUBWORD_SYNC_OP (nand, ~, &, short, 2, newval)

SUBWORD_SYNC_OP (add, , +, signed char, 1, newval)
SUBWORD_SYNC_OP (sub, , -, signed char, 1, newval)
SUBWORD_SYNC_OP (or, , |, signed char, 1, newval)
SUBWORD_SYNC_OP (and, , &, signed char, 1, newval)
SUBWORD_SYNC_OP (xor, , ^, signed char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, signed char, 1, newval)

/* 32-bit compare-and-swap, returning the value observed in *PTR.
   Returns early (without invoking the kernel helper) as soon as the
   current value is seen to differ from OLDVAL; otherwise loops, since
   a failed cmpxchg means the word changed under us and may now match
   OLDVAL again.  */
int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int actual_oldval, fail;

  while (1)
    {
      actual_oldval = *ptr;

      if (__builtin_expect (oldval != actual_oldval, 0))
	return actual_oldval;

      fail = __kernel_cmpxchg (actual_oldval, newval, ptr);

      if (__builtin_expect (!fail, 1))
	return oldval;
    }
}

/* Subword compare-and-swap, emulated on the containing aligned word as
   in SUBWORD_SYNC_OP: only the operand's lane is compared and replaced;
   changes to the neighbouring lanes merely cause a retry.  Returns the
   lane value observed (OLDVAL itself on success).  */
#define SUBWORD_VAL_CAS(TYPE, WIDTH)					\
  TYPE HIDDEN								\
  __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,		\
				       TYPE newval)			\
  {									\
    int *wordptr = (int *)((unsigned int) ptr & ~3), fail;		\
    unsigned int mask, shift, actual_oldval, actual_newval;		\
									\
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;	\
    mask = MASK_##WIDTH << shift;					\
									\
    while (1)								\
      {									\
	actual_oldval = *wordptr;					\
									\
	/* Lane mismatch: report what we saw, without writing.  */	\
	if (__builtin_expect (((actual_oldval & mask) >> shift) !=	\
			      ((unsigned int) oldval & MASK_##WIDTH), 0)) \
	  return (actual_oldval & mask) >> shift;			\
									\
	actual_newval = (actual_oldval & ~mask)				\
	  | (((unsigned int) newval << shift) & mask);			\
									\
	fail = __kernel_cmpxchg (actual_oldval, actual_newval,		\
				 wordptr);				\
									\
	if (__builtin_expect (!fail, 1))				\
	  return oldval;						\
      }									\
  }

SUBWORD_VAL_CAS (short, 2)
SUBWORD_VAL_CAS (signed char, 1)

/* Minimal local stand-in for <stdbool.h>'s bool; this file is built in
   a freestanding context.  */
typedef unsigned char bool;

/* 32-bit boolean compare-and-swap: a single kernel helper call decides
   the result — true iff NEWVAL was installed.  */
bool HIDDEN
__sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int failure = __kernel_cmpxchg (oldval, newval, ptr);
  return (failure == 0);
}

/* Subword boolean compare-and-swap, built on the val CAS above: it
   succeeded exactly when the observed lane value equals OLDVAL.  */
#define SUBWORD_BOOL_CAS(TYPE, WIDTH)					\
  bool HIDDEN								\
  __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,		\
					TYPE newval)			\
  {									\
    TYPE actual_oldval							\
      = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval);	\
    return (oldval == actual_oldval);					\
  }

SUBWORD_BOOL_CAS (short, 2)
SUBWORD_BOOL_CAS (signed char, 1)

/* Full memory barrier, delegated to the kernel helper.  */
void HIDDEN
__sync_synchronize (void)
{
  __kernel_dmb ();
}

/* 32-bit atomic exchange: unconditionally install VAL via the cmpxchg
   retry loop and return the previous value.  */
int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
  int failure, oldval;

  do {
    oldval = *ptr;
    failure = __kernel_cmpxchg (oldval, val, ptr);
  } while (failure != 0);

  return oldval;
}

/* Subword atomic exchange, emulated on the containing aligned word as
   in SUBWORD_SYNC_OP.  Returns the previous lane value.  */
#define SUBWORD_TEST_AND_SET(TYPE, WIDTH)				\
  TYPE HIDDEN								\
  __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val)		\
  {									\
    int failure;							\
    unsigned int oldval, newval, shift, mask;				\
    int *wordptr = (int *) ((unsigned int) ptr & ~3);			\
									\
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;	\
    mask = MASK_##WIDTH << shift;					\
									\
    do {								\
      oldval = *wordptr;						\
      newval = (oldval & ~mask)						\
	| (((unsigned int) val << shift) & mask);			\
      failure = __kernel_cmpxchg (oldval, newval, wordptr);		\
    } while (failure != 0);						\
									\
    return (oldval & mask) >> shift;					\
  }

SUBWORD_TEST_AND_SET (short, 2)
SUBWORD_TEST_AND_SET (signed char, 1)

/* Lock release: barrier, then a plain store of zero.  NOTE(review):
   the 8-byte variant is a plain "long long" store, which is not a
   single-copy atomic access on this target — this matches the
   historical libgcc implementation; confirm against
   linux-atomic-64bit.c before changing.  */
#define SYNC_LOCK_RELEASE(TYPE, WIDTH)					\
  void HIDDEN								\
  __sync_lock_release_##WIDTH (TYPE *ptr)				\
  {									\
    /* All writes before this point must be seen before we release	\
       the lock itself.  */						\
    __kernel_dmb ();							\
    *ptr = 0;								\
  }

SYNC_LOCK_RELEASE (long long, 8)
SYNC_LOCK_RELEASE (int, 4)
SYNC_LOCK_RELEASE (short, 2)
SYNC_LOCK_RELEASE (char, 1)