/* xref: /netbsd-src/external/bsd/jemalloc.old/include/jemalloc/internal/atomic_msvc.h */
#ifndef JEMALLOC_INTERNAL_ATOMIC_MSVC_H
#define JEMALLOC_INTERNAL_ATOMIC_MSVC_H

#define ATOMIC_INIT(...) {__VA_ARGS__}

typedef enum {
	atomic_memory_order_relaxed,
	atomic_memory_order_acquire,
	atomic_memory_order_release,
	atomic_memory_order_acq_rel,
	atomic_memory_order_seq_cst
} atomic_memory_order_t;

typedef char atomic_repr_0_t;
typedef short atomic_repr_1_t;
typedef long atomic_repr_2_t;
typedef __int64 atomic_repr_3_t;
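/*
 * Illustrative note (not in the upstream header): lg_size is the base-2 log
 * of the operand width in bytes, so lg_size 0..3 selects the 1-, 2-, 4- and
 * 8-byte representation above.  The chosen C types (char, short, long,
 * __int64) match the operand types of the corresponding MSVC Interlocked
 * intrinsics.
 */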

ATOMIC_INLINE void
atomic_fence(atomic_memory_order_t mo) {
	_ReadWriteBarrier();
#  if defined(_M_ARM) || defined(_M_ARM64)
	/* ARM needs a barrier for everything but relaxed. */
	if (mo != atomic_memory_order_relaxed) {
		MemoryBarrier();
	}
#  elif defined(_M_IX86) || defined (_M_X64)
	/* x86 needs a barrier only for seq_cst. */
	if (mo == atomic_memory_order_seq_cst) {
		MemoryBarrier();
	}
#  else
#  error "Don't know how to create atomics for this platform for MSVC."
#  endif
	_ReadWriteBarrier();
}
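/*
 * Illustrative note (not in the upstream header): _ReadWriteBarrier() is a
 * compiler-only barrier, so the two calls bracketing the conditional keep
 * the compiler from moving memory accesses across the fence, while
 * MemoryBarrier() emits an actual hardware barrier (e.g. dmb on ARM).  On
 * x86/x64 ordinary loads and stores already carry acquire/release ordering,
 * which is why only a seq_cst fence pays for the hardware barrier there.
 */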

#define ATOMIC_INTERLOCKED_REPR(lg_size) atomic_repr_ ## lg_size ## _t

#define ATOMIC_CONCAT(a, b) ATOMIC_RAW_CONCAT(a, b)
#define ATOMIC_RAW_CONCAT(a, b) a ## b

#define ATOMIC_INTERLOCKED_NAME(base_name, lg_size) ATOMIC_CONCAT(	\
    base_name, ATOMIC_INTERLOCKED_SUFFIX(lg_size))

#define ATOMIC_INTERLOCKED_SUFFIX(lg_size)				\
    ATOMIC_CONCAT(ATOMIC_INTERLOCKED_SUFFIX_, lg_size)

#define ATOMIC_INTERLOCKED_SUFFIX_0 8
#define ATOMIC_INTERLOCKED_SUFFIX_1 16
#define ATOMIC_INTERLOCKED_SUFFIX_2
#define ATOMIC_INTERLOCKED_SUFFIX_3 64
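/*
 * Illustrative expansions (not in the upstream header): lg_size picks the
 * Interlocked variant of the matching operand width, e.g.
 *
 *	ATOMIC_INTERLOCKED_NAME(_InterlockedExchange, 0) -> _InterlockedExchange8
 *	ATOMIC_INTERLOCKED_NAME(_InterlockedExchange, 2) -> _InterlockedExchange
 *	ATOMIC_INTERLOCKED_NAME(_InterlockedExchange, 3) -> _InterlockedExchange64
 *
 * The 32-bit intrinsics carry no suffix, which is why
 * ATOMIC_INTERLOCKED_SUFFIX_2 expands to nothing.
 */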

#define JEMALLOC_GENERATE_ATOMICS(type, short_type, lg_size)		\
typedef struct {							\
	ATOMIC_INTERLOCKED_REPR(lg_size) repr;				\
} atomic_##short_type##_t;						\
									\
ATOMIC_INLINE type							\
atomic_load_##short_type(const atomic_##short_type##_t *a,		\
    atomic_memory_order_t mo) {						\
	ATOMIC_INTERLOCKED_REPR(lg_size) ret = a->repr;			\
	if (mo != atomic_memory_order_relaxed) {			\
		atomic_fence(atomic_memory_order_acquire);		\
	}								\
	return (type) ret;						\
}									\
									\
ATOMIC_INLINE void							\
atomic_store_##short_type(atomic_##short_type##_t *a,			\
    type val, atomic_memory_order_t mo) {				\
	if (mo != atomic_memory_order_relaxed) {			\
		atomic_fence(atomic_memory_order_release);		\
	}								\
	a->repr = (ATOMIC_INTERLOCKED_REPR(lg_size)) val;		\
	if (mo == atomic_memory_order_seq_cst) {			\
		atomic_fence(atomic_memory_order_seq_cst);		\
	}								\
}									\
									\
ATOMIC_INLINE type							\
atomic_exchange_##short_type(atomic_##short_type##_t *a, type val,	\
    atomic_memory_order_t mo) {						\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedExchange,	\
	    lg_size)(&a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);	\
}									\
									\
ATOMIC_INLINE bool							\
atomic_compare_exchange_weak_##short_type(atomic_##short_type##_t *a,	\
    type *expected, type desired, atomic_memory_order_t success_mo,	\
    atomic_memory_order_t failure_mo) {					\
	ATOMIC_INTERLOCKED_REPR(lg_size) e =				\
	    (ATOMIC_INTERLOCKED_REPR(lg_size))*expected;		\
	ATOMIC_INTERLOCKED_REPR(lg_size) d =				\
	    (ATOMIC_INTERLOCKED_REPR(lg_size))desired;			\
	ATOMIC_INTERLOCKED_REPR(lg_size) old =				\
	    ATOMIC_INTERLOCKED_NAME(_InterlockedCompareExchange,	\
		lg_size)(&a->repr, d, e);				\
	if (old == e) {							\
		return true;						\
	} else {							\
		*expected = (type)old;					\
		return false;						\
	}								\
}									\
									\
ATOMIC_INLINE bool							\
atomic_compare_exchange_strong_##short_type(atomic_##short_type##_t *a,	\
    type *expected, type desired, atomic_memory_order_t success_mo,	\
    atomic_memory_order_t failure_mo) {					\
	/* We implement the weak version with strong semantics. */	\
	return atomic_compare_exchange_weak_##short_type(a, expected,	\
	    desired, success_mo, failure_mo);				\
}
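/*
 * Illustrative instantiation (a sketch, not part of this header): jemalloc's
 * atomic.h expands this macro once per atomic type it needs, e.g.
 *
 *	JEMALLOC_GENERATE_ATOMICS(void *, p, LG_SIZEOF_PTR)
 *
 * which defines atomic_p_t together with atomic_load_p(), atomic_store_p(),
 * atomic_exchange_p() and the two compare-exchange variants, all backed by
 * the Interlocked intrinsic of pointer width.
 */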


#define JEMALLOC_GENERATE_INT_ATOMICS(type, short_type, lg_size)	\
JEMALLOC_GENERATE_ATOMICS(type, short_type, lg_size)			\
									\
ATOMIC_INLINE type							\
atomic_fetch_add_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedExchangeAdd,	\
	    lg_size)(&a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);	\
}									\
									\
ATOMIC_INLINE type							\
atomic_fetch_sub_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	/*								\
	 * MSVC warns on negation of unsigned operands, but for us it	\
	 * gives exactly the right semantics (MAX_TYPE + 1 - operand).	\
	 */								\
	__pragma(warning(push))						\
	__pragma(warning(disable: 4146))				\
	return atomic_fetch_add_##short_type(a, -val, mo);		\
	__pragma(warning(pop))						\
}									\
ATOMIC_INLINE type							\
atomic_fetch_and_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedAnd, lg_size)(	\
	    &a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);		\
}									\
ATOMIC_INLINE type							\
atomic_fetch_or_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedOr, lg_size)(	\
	    &a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);		\
}									\
ATOMIC_INLINE type							\
atomic_fetch_xor_##short_type(atomic_##short_type##_t *a,		\
    type val, atomic_memory_order_t mo) {				\
	return (type)ATOMIC_INTERLOCKED_NAME(_InterlockedXor, lg_size)(	\
	    &a->repr, (ATOMIC_INTERLOCKED_REPR(lg_size))val);		\
}
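/*
 * Illustrative usage (a sketch, not part of this header): assuming atomic.h
 * instantiates JEMALLOC_GENERATE_INT_ATOMICS(uint32_t, u32, 2), the generated
 * API is used as:
 *
 *	atomic_u32_t counter = ATOMIC_INIT(0);
 *	uint32_t old = atomic_fetch_add_u32(&counter, 1,
 *	    atomic_memory_order_relaxed);
 *	uint32_t cur = atomic_load_u32(&counter, atomic_memory_order_acquire);
 *
 * Here atomic_fetch_add_u32() maps to _InterlockedExchangeAdd and returns the
 * previous value; the load is a plain read of a->repr followed by an acquire
 * fence.
 */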

#endif /* JEMALLOC_INTERNAL_ATOMIC_MSVC_H */