/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>

#include <rte_common.h>
#include <rte_stdatomic.h>

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}
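
/*
 * Illustrative only: a minimal producer/consumer sketch (hypothetical names)
 * showing how the barriers above are typically paired. The producer publishes
 * a payload before setting a flag; the consumer checks the flag before
 * reading the payload.
 *
 *   struct msg { uint32_t payload; volatile uint32_t ready; };
 *
 *   static inline void
 *   publish(struct msg *m, uint32_t value)
 *   {
 *           m->payload = value;
 *           rte_wmb();         // payload store ordered before the flag store
 *           m->ready = 1;
 *   }
 *
 *   static inline int
 *   consume(struct msg *m, uint32_t *value)
 *   {
 *           if (m->ready == 0)
 *                   return 0;
 *           rte_rmb();         // flag load ordered before the payload load
 *           *value = m->payload;
 *           return 1;
 *   }
 */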

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics. It is
 *  deprecated to keep the memory ordering semantics in use simple.
 *
 *  rte_atomic_thread_fence(rte_memory_order_acq_rel) should be used instead.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics. It is
 *  deprecated to keep the memory ordering semantics in use simple.
 *
 *  rte_atomic_thread_fence(rte_memory_order_release) should be used instead.
 *  The fence also guarantees that LOAD operations that precede the call
 *  are globally visible across the lcores before the STORE operations
 *  that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics. It is
 *  deprecated to keep the memory ordering semantics in use simple.
 *
 *  rte_atomic_thread_fence(rte_memory_order_acquire) should be used instead.
 *  The fence also guarantees that LOAD operations that precede the call
 *  are globally visible across the lcores before the STORE operations
 *  that follow it.
 */
static inline void rte_smp_rmb(void);
///@}
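
/*
 * Migration note (illustrative): per the deprecation notes above, the SMP
 * barriers map directly onto explicit atomic fences:
 *
 *   rte_smp_mb();   ->  rte_atomic_thread_fence(rte_memory_order_acq_rel);
 *   rte_smp_wmb();  ->  rte_atomic_thread_fence(rte_memory_order_release);
 *   rte_smp_rmb();  ->  rte_atomic_thread_fence(rte_memory_order_acquire);
 *
 * In new code the shared flag or index being published would itself normally
 * be accessed with the rte_atomic_*_explicit() helpers from rte_stdatomic.h.
 */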

/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on I/O device that precede the
 * rte_io_rmb() call are visible to CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}
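
/*
 * Illustrative only: a typical driver-style use of the I/O barriers, with
 * hypothetical descriptor and doorbell names. The descriptor contents must be
 * visible to the device before the doorbell write that tells it to fetch
 * them.
 *
 *   desc->addr = mbuf_iova;
 *   desc->len  = pkt_len;
 *   rte_io_wmb();              // descriptor stores reach the device first
 *   *doorbell_reg = tail_idx;  // then notify the device
 */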

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define	rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
#endif
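
/*
 * Illustrative only: rte_compiler_barrier() stops the compiler from caching
 * or reordering accesses across the barrier, but emits no CPU fence. A
 * hypothetical polling loop on a plain variable might use it to force the
 * flag to be re-read on every iteration:
 *
 *   while (shared->stop == 0) {
 *           do_work();
 *           rte_compiler_barrier();   // re-load shared->stop next iteration
 *   }
 */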

/**
 * Synchronization fence between threads based on the specified memory order.
 */
static inline void rte_atomic_thread_fence(rte_memory_order memorder);

/*------------------------- 16 bit atomic operations -------------------------*/

#ifndef RTE_TOOLCHAIN_MSVC

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
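
/*
 * Illustrative only: rte_atomic16_cmpset() returns 0 when another thread
 * changed *dst in the meantime, so read-modify-write sequences built on it
 * are retried in a loop. A hypothetical "store the maximum" helper:
 *
 *   static inline void
 *   atomic16_max(volatile uint16_t *dst, uint16_t val)
 *   {
 *           uint16_t cur;
 *
 *           do {
 *                   cur = *dst;
 *                   if (val <= cur)
 *                           return;   // nothing to update
 *           } while (rte_atomic16_cmpset(dst, cur, val) == 0);
 *   }
 */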

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint16_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif
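
/*
 * Illustrative only: rte_atomic16_exchange() both reads and replaces the old
 * value in one step, which suits "grab and reset" patterns. With a
 * hypothetical error-flags word:
 *
 *   uint16_t pending = rte_atomic16_exchange(&err_flags, 0);
 *   if (pending != 0)
 *           handle_errors(pending);   // each flag is observed exactly once
 */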

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

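/*
 * Illustrative only: rte_atomic16_test_and_set() and rte_atomic16_clear()
 * can back a small busy flag (hypothetical helper names). Note that
 * rte_atomic16_clear() is a plain store, so this is only a sketch; real DPDK
 * code would normally use rte_spinlock_t for mutual exclusion.
 *
 *   static rte_atomic16_t busy = RTE_ATOMIC16_INIT(0);
 *
 *   if (rte_atomic16_test_and_set(&busy)) {
 *           do_exclusive_work();
 *           rte_atomic16_clear(&busy);   // release the flag
 *   } else {
 *           // someone else holds the flag; try again later
 *   }
 */
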
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint32_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif
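
/*
 * Illustrative only: inc/dec_and_test form the usual reference-counting
 * pattern (hypothetical object and free function).
 *
 *   struct obj { rte_atomic32_t refcnt; /+ ... +/ };
 *
 *   rte_atomic32_inc(&o->refcnt);            // take a reference
 *
 *   if (rte_atomic32_dec_and_test(&o->refcnt))
 *           obj_free(o);                     // last reference dropped
 */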

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint64_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}
#endif
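
/*
 * Illustrative only: 64-bit counters are commonly used for statistics that
 * several lcores update concurrently (hypothetical per-port byte counter;
 * PRId64 comes from <inttypes.h>).
 *
 *   static rte_atomic64_t port_bytes = RTE_ATOMIC64_INIT(0);
 *
 *   rte_atomic64_add(&port_bytes, pkt_len);           // datapath lcores
 *
 *   printf("bytes: %" PRId64 "\n",
 *          rte_atomic64_read(&port_bytes));           // stats thread
 */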

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

#endif

/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
typedef struct {
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
#ifndef RTE_TOOLCHAIN_MSVC
		__extension__ __int128 int128;
#endif
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently available for the x86-64 and aarch64
 * platforms.
 *
 * @note The success and failure arguments must be one of the
 * rte_memory_order_* values defined in rte_stdatomic.h. For details on
 * their behavior, refer to the C++11 standard memory orders they mirror.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be rte_memory_order_release,
 *   rte_memory_order_acq_rel, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
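
/*
 * Illustrative only: a common use of the 128-bit compare-exchange is updating
 * a {pointer, tag} pair in one shot to defeat ABA problems (hypothetical
 * lock-free head update; x86-64/aarch64 only per the note above).
 *
 *   static rte_int128_t head;                      // {pointer, tag} pair
 *   rte_int128_t expected, desired;
 *
 *   expected = head;                               // racy snapshot; corrected on CAS failure
 *   do {
 *           desired.val[0] = (uintptr_t)node;      // new pointer
 *           desired.val[1] = expected.val[1] + 1;  // bump the tag
 *   } while (rte_atomic128_cmp_exchange(&head, &expected, &desired, 0,
 *                   rte_memory_order_acq_rel, rte_memory_order_acquire) == 0);
 */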

#endif /* _RTE_ATOMIC_H_ */