/* Copyright (c) 2013, Ben Noordhuis <info@bnoordhuis.nl>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#ifndef UV_ATOMIC_OPS_H_
#define UV_ATOMIC_OPS_H_

#include "internal.h"  /* UV_UNUSED */

#if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
#include <atomic.h>
#endif

UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval));
UV_UNUSED(static void cpu_relax(void));

/* Prefer hand-rolled assembly over the gcc builtins because the latter also
 * issue full memory barriers.
 */
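/* Atomically compare *ptr with oldval and, if they match, store newval.
 * Every branch below returns the value that was observed at *ptr, so the
 * swap succeeded if and only if the return value equals oldval.
 */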
UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {
#if defined(__i386__) || defined(__x86_64__)
  int out;
  __asm__ __volatile__ ("lock; cmpxchg %2, %1;"
                        : "=a" (out), "+m" (*(volatile int*) ptr)
                        : "r" (newval), "0" (oldval)
                        : "memory");
  return out;
#elif defined(__MVS__)
  /* Use hand-rolled assembly because codegen from builtin __plo_CSST results in
   * a runtime bug.
   */
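  /* CS is the z/Architecture compare-and-swap instruction; on a mismatch it
   * loads the current contents of *ptr into oldval, so the return below
   * always yields the value that was observed in memory.
   */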
  __asm(" cs %0,%2,%1 \n " : "+r"(oldval), "+m"(*ptr) : "r"(newval) :);
  return oldval;
#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
  return atomic_cas_uint((uint_t *)ptr, (uint_t)oldval, (uint_t)newval);
#else
  return __sync_val_compare_and_swap(ptr, oldval, newval);
#endif
}
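
/* Pause briefly inside a spin-wait loop to let the busy-waiting core back
 * off.  x86: the PAUSE instruction; ARMv7+/AArch64: YIELD; non-Apple 64-bit
 * PowerPC: lower, then restore, the SMT hardware thread priority.
 * Elsewhere this is a no-op (a pure compiler barrier on Apple ppc).
 */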
UV_UNUSED(static void cpu_relax(void)) {
#if defined(__i386__) || defined(__x86_64__)
  __asm__ __volatile__ ("rep; nop" ::: "memory");  /* a.k.a. PAUSE */
#elif (defined(__arm__) && __ARM_ARCH >= 7) || defined(__aarch64__)
  __asm__ __volatile__ ("yield" ::: "memory");
#elif (defined(__ppc__) || defined(__ppc64__)) && defined(__APPLE__)
  __asm volatile ("" : : : "memory");
#elif !defined(__APPLE__) && (defined(__powerpc64__) || defined(__ppc64__) || defined(__PPC64__))
  __asm__ __volatile__ ("or 1,1,1; or 2,2,2" ::: "memory");
#endif
}
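
/* Usage sketch (not part of libuv): cmpxchgi() and cpu_relax() are the
 * building blocks of a test-and-set spin lock.  The `lock' variable and
 * the spin_lock()/spin_unlock() helpers are hypothetical, shown only to
 * illustrate the intended calling pattern:
 *
 *   static int lock;                      -- 0 = free, 1 = held
 *
 *   static void spin_lock(void) {
 *     while (cmpxchgi(&lock, 0, 1) != 0)  -- loop until we flip 0 -> 1
 *       cpu_relax();
 *   }
 *
 *   static void spin_unlock(void) {
 *     cmpxchgi(&lock, 1, 0);              -- release with another CAS
 *   }
 *
 * The release uses a CAS rather than a plain store because not every
 * branch of cmpxchgi() above implies the same memory barrier.
 */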

#endif  /* UV_ATOMIC_OPS_H_ */