/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation.
 */

/*
 * Inspired from FreeBSD src/sys/i386/include/atomic.h
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 */

#ifndef _RTE_ATOMIC_X86_H_
#error do not include this file directly, use <rte_atomic.h> instead
#endif

#ifndef _RTE_ATOMIC_I686_H_
#define _RTE_ATOMIC_I686_H_

#include <stdint.h>
#include <rte_common.h>

/*------------------------- 64 bit atomic operations -------------------------*/

#ifndef RTE_FORCE_INTRINSICS
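/*
 * Compare-and-set on a 64-bit word: if *dst equals exp, write src and
 * return non-zero; otherwise return 0. On 32-bit x86 this requires the
 * lock-prefixed cmpxchg8b instruction, which compares edx:eax against
 * the memory operand and, on match, stores ecx:ebx into it.
 */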
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	uint8_t res;
	union {
		struct {
			uint32_t l32;
			uint32_t h32;
		};
		uint64_t u64;
	} _exp, _src;

	_exp.u64 = exp;
	_src.u64 = src;

#ifndef __PIC__
	asm volatile (
			MPLOCKED
			"cmpxchg8b (%[dst]);"
			"setz %[res];"
			: [res] "=a" (res)      /* result in eax */
			: [dst] "S" (dst),      /* esi */
			  "b" (_src.l32),       /* ebx */
			  "c" (_src.h32),       /* ecx */
			  "a" (_exp.l32),       /* eax */
			  "d" (_exp.h32)        /* edx */
			: "memory");            /* clobber list */
#else
	/*
	 * Under PIC, ebx holds the GOT pointer and must be preserved,
	 * so stage the low word of src in edi and swap it into ebx
	 * around the cmpxchg8b.
	 */
	asm volatile (
			"xchgl %%ebx, %%edi;\n"
			MPLOCKED
			"cmpxchg8b (%[dst]);"
			"setz %[res];"
			"xchgl %%ebx, %%edi;\n"
			: [res] "=a" (res)      /* result in eax */
			: [dst] "S" (dst),      /* esi */
			  "D" (_src.l32),       /* edi, swapped into ebx */
			  "c" (_src.h32),       /* ecx */
			  "a" (_exp.l32),       /* eax */
			  "d" (_exp.h32)        /* edx */
			: "memory");            /* clobber list */
#endif

	return res;
}

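/*
 * Atomically swap in val and return the previous value of *dest,
 * implemented as a compare-and-set retry loop.
 */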
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dest, uint64_t val)
{
	uint64_t old;

	do {
		old = *dest;
	} while (rte_atomic64_cmpset(dest, old, val) == 0);

	return old;
}

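/* Initialize the counter to 0 through the atomic cmpset loop. */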
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
}

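/*
 * A plain 64-bit load is not atomic on i686, so read the value by
 * atomically replacing it with itself.
 */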
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
}

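/* Atomically store new_value, retrying until the cmpset succeeds. */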
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
}

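/* Atomically add inc to the counter. */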
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp + inc);
	}
}

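/* Atomically subtract dec from the counter. */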
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp - dec);
	}
}

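/* Atomically increment the counter by one. */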
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}

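/* Atomically decrement the counter by one. */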
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}

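/* Atomically add inc and return the new value. */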
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp + inc);
	}

	return tmp + inc;
}

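/* Atomically subtract dec and return the new value. */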
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp - dec);
	}

	return tmp - dec;
}

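/* Increment by one; return non-zero if the result is 0. */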
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}

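/* Decrement by one; return non-zero if the result is 0. */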
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}

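/* Set the counter to 1 if it is currently 0; return non-zero on success. */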
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}

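/* Reset the counter to 0. */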
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif /* !RTE_FORCE_INTRINSICS */

#endif /* _RTE_ATOMIC_I686_H_ */