/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_compat.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);
///@}

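/*
 * Usage sketch (illustrative, not part of this header): a classic
 * producer/consumer pairing of rte_smp_wmb()/rte_smp_rmb(). The
 * variables below are hypothetical; real code would also need them
 * to be volatile or accessed atomically.
 *
 * @code
 *   // producer lcore: fill the payload before publishing the index
 *   buf[idx] = value;
 *   rte_smp_wmb();      // payload globally visible before tail update
 *   tail = idx + 1;
 *
 *   // consumer lcore: read the index before reading the payload
 *   while (head == tail)
 *       ;
 *   rte_smp_rmb();      // index read ordered before payload read
 *   value = buf[head];
 * @endcode
 */
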
/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on I/O device that precede the
 * rte_io_rmb() call are visible to CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define	rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

/**
 * Synchronization fence between threads based on the specified memory order.
 */
static inline void rte_atomic_thread_fence(int memorder);

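/*
 * Usage sketch (illustrative, not part of this header): publishing
 * data with release/acquire fences. 'data' and 'flag' are hypothetical
 * variables; real code would make 'flag' volatile or atomic.
 *
 * @code
 *   // writer
 *   data = 42;
 *   rte_atomic_thread_fence(__ATOMIC_RELEASE); // order data before flag
 *   flag = 1;
 *
 *   // reader
 *   while (flag == 0)
 *       ;
 *   rte_atomic_thread_fence(__ATOMIC_ACQUIRE); // order flag before data
 *   use(data);                                 // hypothetical consumer
 * @endcode
 */
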
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

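/*
 * Usage sketch (illustrative): a compare-and-set retry loop that
 * atomically sets the low bit of a hypothetical 16-bit flags word.
 *
 * @code
 *   static volatile uint16_t flags;
 *   uint16_t old;
 *
 *   do {
 *       old = flags;
 *   } while (!rte_atomic16_cmpset(&flags, old, old | 0x1));
 * @endcode
 */
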
/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

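/*
 * Usage sketch (illustrative): claiming a hypothetical token word
 * while retrieving its previous value in a single atomic step.
 *
 * @code
 *   static volatile uint16_t token;
 *
 *   uint16_t prev = rte_atomic16_exchange(&token, 1);
 *   if (prev == 0) {
 *       // this thread was the first to claim the token
 *   }
 * @endcode
 */
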
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

335 
336 /**
337  * Atomically increment a 16-bit counter by one and test.
338  *
339  * Atomically increments the atomic counter (v) by one and returns true if
340  * the result is 0, or false in all other cases.
341  *
342  * @param v
343  *   A pointer to the atomic counter.
344  * @return
345  *   True if the result after the increment operation is 0; false otherwise.
346  */
347 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);
348 
349 #ifdef RTE_FORCE_INTRINSICS
350 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
351 {
352 	return __sync_add_and_fetch(&v->cnt, 1) == 0;
353 }
354 #endif
355 
356 /**
357  * Atomically decrement a 16-bit counter by one and test.
358  *
359  * Atomically decrements the atomic counter (v) by one and returns true if
360  * the result is 0, or false in all other cases.
361  *
362  * @param v
363  *   A pointer to the atomic counter.
364  * @return
365  *   True if the result after the decrement operation is 0; false otherwise.
366  */
367 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);
368 
369 #ifdef RTE_FORCE_INTRINSICS
370 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
371 {
372 	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
373 }
374 #endif
375 
/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

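/*
 * Usage sketch (illustrative): a minimal busy-wait flag built from
 * test-and-set and clear. Real code should prefer rte_spinlock_t,
 * which also provides the required memory ordering guarantees.
 *
 * @code
 *   static rte_atomic16_t lock = RTE_ATOMIC16_INIT(0);
 *
 *   while (!rte_atomic16_test_and_set(&lock))
 *       ;                          // spin until the flag is won
 *   // ... critical section ...
 *   rte_atomic16_clear(&lock);     // release the flag
 * @endcode
 */
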
/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

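/*
 * Usage sketch (illustrative): reference counting with a 32-bit
 * counter. The object type and release function are hypothetical.
 *
 * @code
 *   struct obj { rte_atomic32_t refcnt; };
 *
 *   void obj_get(struct obj *o) { rte_atomic32_inc(&o->refcnt); }
 *
 *   void obj_put(struct obj *o)
 *   {
 *       if (rte_atomic32_dec_and_test(&o->refcnt))
 *           obj_free(o);           // hypothetical destructor
 *   }
 * @endcode
 */
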
/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

739 
740 /**
741  * The atomic counter structure.
742  */
743 typedef struct {
744 	volatile int64_t cnt;  /**< Internal counter value. */
745 } rte_atomic64_t;
746 
747 /**
748  * Static initializer for an atomic counter.
749  */
750 #define RTE_ATOMIC64_INIT(val) { (val) }
751 
752 /**
753  * Initialize the atomic counter.
754  *
755  * @param v
756  *   A pointer to the atomic counter.
757  */
758 static inline void
759 rte_atomic64_init(rte_atomic64_t *v);
760 
761 #ifdef RTE_FORCE_INTRINSICS
762 static inline void
763 rte_atomic64_init(rte_atomic64_t *v)
764 {
765 #ifdef __LP64__
766 	v->cnt = 0;
767 #else
768 	int success = 0;
769 	uint64_t tmp;
770 
771 	while (success == 0) {
772 		tmp = v->cnt;
773 		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
774 		                              tmp, 0);
775 	}
776 #endif
777 }
778 #endif
779 
/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	/* retry until the whole 64-bit value is swapped in atomically */
	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

913 
914 /**
915  * Add a 64-bit value to an atomic counter and return the result.
916  *
917  * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
918  * returns the value of v after the addition.
919  *
920  * @param v
921  *   A pointer to the atomic counter.
922  * @param inc
923  *   The value to be added to the counter.
924  * @return
925  *   The value of v after the addition.
926  */
927 static inline int64_t
928 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
929 
930 #ifdef RTE_FORCE_INTRINSICS
931 static inline int64_t
932 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
933 {
934 	return __sync_add_and_fetch(&v->cnt, inc);
935 }
936 #endif
937 
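/*
 * Usage sketch (illustrative): drawing monotonically increasing
 * sequence numbers from a shared 64-bit counter; the variable names
 * are hypothetical.
 *
 * @code
 *   static rte_atomic64_t next_seq = RTE_ATOMIC64_INIT(0);
 *
 *   uint64_t seq = (uint64_t)rte_atomic64_add_return(&next_seq, 1);
 * @endcode
 */
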
/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
RTE_STD_C11
typedef struct {
	RTE_STD_C11
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/**
 * Atomic compare and exchange.
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently available for the x86-64 and aarch64
 * platforms.
 *
 * @note The success and failure arguments must be one of the __ATOMIC_*
 * memory order values provided by the compiler's atomic built-ins, which
 * mirror the C++11 memory model. For details on their behavior, refer to
 * the standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be __ATOMIC_RELEASE,
 *   __ATOMIC_ACQ_REL, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */

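/*
 * Usage sketch (illustrative): updating a hypothetical {pointer, tag}
 * pair in one shot to sidestep ABA problems; only meaningful on 64-bit
 * targets where rte_atomic128_cmp_exchange() is provided. On failure
 * 'old' is refreshed with the current contents, so the loop simply
 * recomputes the new pair and retries.
 *
 * @code
 *   rte_int128_t old, new;
 *
 *   old = list_head;                     // hypothetical shared pair
 *   do {
 *       new.val[0] = (uintptr_t)node;    // new pointer half
 *       new.val[1] = old.val[1] + 1;     // bump the tag half
 *   } while (!rte_atomic128_cmp_exchange(&list_head, &old, &new, 0,
 *                                        __ATOMIC_ACQ_REL,
 *                                        __ATOMIC_RELAXED));
 * @endcode
 */
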
#endif /* _RTE_ATOMIC_H_ */