/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation.
 */

/*
 * Inspired from FreeBSD src/sys/i386/include/atomic.h
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 */

#ifndef _RTE_ATOMIC_X86_H_
#error do not include this file directly, use <rte_atomic.h> instead
#endif

#ifndef _RTE_ATOMIC_I686_H_
#define _RTE_ATOMIC_I686_H_

#include <stdint.h>
#include <rte_common.h>

/*------------------------- 64 bit atomic operations -------------------------*/

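/*
 * Without RTE_FORCE_INTRINSICS, the 64-bit atomics below are built on a
 * "lock cmpxchg8b" inline-asm compare-and-set primitive; every other
 * operation is a retry loop around rte_atomic64_cmpset().
 */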
#ifndef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	uint8_t res;
	union {
		struct {
			uint32_t l32;
			uint32_t h32;
		};
		uint64_t u64;
	} _exp, _src;

	_exp.u64 = exp;
	_src.u64 = src;

#ifndef __PIC__
	asm volatile (
			MPLOCKED
			"cmpxchg8b (%[dst]);"
			"setz %[res];"
			: [res] "=a" (res)      /* result in eax */
			: [dst] "S" (dst),      /* esi */
			  "b" (_src.l32),       /* ebx */
			  "c" (_src.h32),       /* ecx */
			  "a" (_exp.l32),       /* eax */
			  "d" (_exp.h32)        /* edx */
			: "memory" );           /* clobber list */
#else
	/*
	 * With -fPIC, %ebx holds the GOT pointer and cannot be used as an
	 * asm input, so the low word of the new value is passed in %edi
	 * and swapped into %ebx around the cmpxchg8b.
	 */
	asm volatile (
			"xchgl %%ebx, %%edi;\n"
			MPLOCKED
			"cmpxchg8b (%[dst]);"
			"setz %[res];"
			"xchgl %%ebx, %%edi;\n"
			: [res] "=a" (res)      /* result in eax */
			: [dst] "S" (dst),      /* esi */
			  "D" (_src.l32),       /* ebx, via edi */
			  "c" (_src.h32),       /* ecx */
			  "a" (_exp.l32),       /* eax */
			  "d" (_exp.h32)        /* edx */
			: "memory" );           /* clobber list */
#endif

	return res;
}
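
/*
 * For reference, a minimal sketch of the same compare-and-set semantics
 * using the compiler's __atomic builtin (assuming GCC/Clang). The helper
 * name is illustrative only; it is not part of this file's API, which
 * keeps the inline-asm version above when RTE_FORCE_INTRINSICS is unset:
 *
 *	static inline int
 *	cmpset64_builtin_sketch(volatile uint64_t *dst, uint64_t exp,
 *			uint64_t src)
 *	{
 *		return __atomic_compare_exchange_n(dst, &exp, src, 0,
 *				__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
 *	}
 */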

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dest, uint64_t val)
{
	uint64_t old;

	do {
		old = *dest;
	} while (rte_atomic64_cmpset(dest, old, val) == 0);

	return old;
}

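/*
 * Even init, read and set go through the compare-and-set loop: on i686 a
 * plain 64-bit load or store is split into two 32-bit accesses, so a read
 * is performed atomically by replacing the value with itself.
 */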
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
}

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
}

static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
}

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp + inc);
	}
}

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp - dec);
	}
}

static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}

static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp + inc);
	}

	return tmp + inc;
}

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp - dec);
	}

	return tmp - dec;
}

static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}

static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}

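/*
 * Atomically set the counter to 1 only if it is currently 0; returns
 * non-zero when the update was performed.
 */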
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}

static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
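
/*
 * Usage sketch (illustrative only; refcnt and release_resource() are
 * hypothetical, and applications normally reach these functions through
 * the generic <rte_atomic.h> wrappers):
 *
 *	rte_atomic64_t refcnt;
 *
 *	rte_atomic64_init(&refcnt);
 *	rte_atomic64_inc(&refcnt);
 *	...
 *	if (rte_atomic64_dec_and_test(&refcnt))
 *		release_resource();
 */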

#endif /* _RTE_ATOMIC_I686_H_ */