// Support for atomic operations -*- C++ -*-

// Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 2, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING.  If not, write to the Free
// Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
// USA.

// As a special exception, you may use this file as part of a free software
// library without restriction.  Specifically, if other files instantiate
// templates or use macros or inline functions from this file, or you compile
// this file and link it with other files to produce an executable, this
// file does not by itself cause the resulting executable to be covered by
// the GNU General Public License.  This exception does not however
// invalidate any other reasons why the executable file might be covered by
// the GNU General Public License.

/** @file atomicity.h
 *  This is an internal header file, included by other library headers.
 *  You should not attempt to use it directly.
 */

35*404b540aSrobert #ifndef _GLIBCXX_ATOMICITY_H
36*404b540aSrobert #define _GLIBCXX_ATOMICITY_H	1
37*404b540aSrobert 
38*404b540aSrobert #include <bits/c++config.h>
39*404b540aSrobert #include <bits/gthr.h>
40*404b540aSrobert #include <bits/atomic_word.h>
41*404b540aSrobert 
_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)42*404b540aSrobert _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
43*404b540aSrobert 
44*404b540aSrobert   // Functions for portable atomic access.
45*404b540aSrobert   // To abstract locking primatives across all thread policies, use:
46*404b540aSrobert   // __exchange_and_add_dispatch
47*404b540aSrobert   // __atomic_add_dispatch
48*404b540aSrobert #ifdef _GLIBCXX_ATOMIC_BUILTINS
49*404b540aSrobert   static inline _Atomic_word
50*404b540aSrobert   __exchange_and_add(volatile _Atomic_word* __mem, int __val)
51*404b540aSrobert   { return __sync_fetch_and_add(__mem, __val); }
52*404b540aSrobert 
53*404b540aSrobert   static inline void
__atomic_add(volatile _Atomic_word * __mem,int __val)54*404b540aSrobert   __atomic_add(volatile _Atomic_word* __mem, int __val)
55*404b540aSrobert   { __sync_fetch_and_add(__mem, __val); }
56*404b540aSrobert #else
57*404b540aSrobert   _Atomic_word
58*404b540aSrobert   __attribute__ ((__unused__))
59*404b540aSrobert   __exchange_and_add(volatile _Atomic_word*, int);
60*404b540aSrobert 
61*404b540aSrobert   void
62*404b540aSrobert   __attribute__ ((__unused__))
63*404b540aSrobert   __atomic_add(volatile _Atomic_word*, int);
64*404b540aSrobert #endif
65*404b540aSrobert 
66*404b540aSrobert   static inline _Atomic_word
__exchange_and_add_single(_Atomic_word * __mem,int __val)67*404b540aSrobert   __exchange_and_add_single(_Atomic_word* __mem, int __val)
68*404b540aSrobert   {
69*404b540aSrobert     _Atomic_word __result = *__mem;
70*404b540aSrobert     *__mem += __val;
71*404b540aSrobert     return __result;
72*404b540aSrobert   }
73*404b540aSrobert 
74*404b540aSrobert   static inline void
__atomic_add_single(_Atomic_word * __mem,int __val)75*404b540aSrobert   __atomic_add_single(_Atomic_word* __mem, int __val)
76*404b540aSrobert   { *__mem += __val; }
77*404b540aSrobert 
78*404b540aSrobert   static inline _Atomic_word
79*404b540aSrobert   __attribute__ ((__unused__))
__exchange_and_add_dispatch(_Atomic_word * __mem,int __val)80*404b540aSrobert   __exchange_and_add_dispatch(_Atomic_word* __mem, int __val)
81*404b540aSrobert   {
82*404b540aSrobert #ifdef __GTHREADS
83*404b540aSrobert     if (__gthread_active_p())
84*404b540aSrobert       return __exchange_and_add(__mem, __val);
85*404b540aSrobert     else
86*404b540aSrobert       return __exchange_and_add_single(__mem, __val);
87*404b540aSrobert #else
88*404b540aSrobert     return __exchange_and_add_single(__mem, __val);
89*404b540aSrobert #endif
90*404b540aSrobert   }
91*404b540aSrobert 
92*404b540aSrobert   static inline void
93*404b540aSrobert   __attribute__ ((__unused__))
__atomic_add_dispatch(_Atomic_word * __mem,int __val)94*404b540aSrobert   __atomic_add_dispatch(_Atomic_word* __mem, int __val)
95*404b540aSrobert   {
96*404b540aSrobert #ifdef __GTHREADS
97*404b540aSrobert     if (__gthread_active_p())
98*404b540aSrobert       __atomic_add(__mem, __val);
99*404b540aSrobert     else
100*404b540aSrobert       __atomic_add_single(__mem, __val);
101*404b540aSrobert #else
102*404b540aSrobert     __atomic_add_single(__mem, __val);
103*404b540aSrobert #endif
104*404b540aSrobert   }
105*404b540aSrobert 
106*404b540aSrobert _GLIBCXX_END_NAMESPACE
107*404b540aSrobert 
108*404b540aSrobert // Even if the CPU doesn't need a memory barrier, we need to ensure
109*404b540aSrobert // that the compiler doesn't reorder memory accesses across the
110*404b540aSrobert // barriers.
111*404b540aSrobert #ifndef _GLIBCXX_READ_MEM_BARRIER
112*404b540aSrobert #define _GLIBCXX_READ_MEM_BARRIER __asm __volatile ("":::"memory")
113*404b540aSrobert #endif
114*404b540aSrobert #ifndef _GLIBCXX_WRITE_MEM_BARRIER
115*404b540aSrobert #define _GLIBCXX_WRITE_MEM_BARRIER __asm __volatile ("":::"memory")
116*404b540aSrobert #endif
117*404b540aSrobert 
118*404b540aSrobert #endif
119