/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_compat.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);
///@}
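
/*
 * Usage sketch (illustrative only, not part of this header): a producer
 * lcore publishes a record to a consumer lcore. The names `data`,
 * `data_ready` and the surrounding logic are hypothetical.
 *
 *   // Producer lcore: write the payload, then publish the flag.
 *   data = 42;
 *   rte_smp_wmb();          // payload store is visible before the flag
 *   data_ready = 1;
 *
 *   // Consumer lcore: observe the flag, then read the payload.
 *   while (data_ready == 0)
 *       ;
 *   rte_smp_rmb();          // flag load completes before the payload load
 *   use(data);
 */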

/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on I/O device that precede the
 * rte_io_rmb() call are visible to CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}
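
/*
 * Usage sketch (illustrative only): ring a device doorbell after filling a
 * descriptor in DMA-visible memory. `desc`, `doorbell_reg` and
 * `RING_DOORBELL` are hypothetical names, not DPDK APIs.
 *
 *   desc->addr = buf_iova;
 *   desc->len = buf_len;
 *   rte_io_wmb();           // descriptor stores reach memory before the
 *                           // doorbell write is seen by the device
 *   *doorbell_reg = RING_DOORBELL;
 */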

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define	rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
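
/*
 * Usage sketch (illustrative only): keep the compiler from hoisting a
 * flag load out of a polling loop. `stop_flag` is a hypothetical name.
 *
 *   while (stop_flag == 0)
 *       rte_compiler_barrier();  // forces stop_flag to be re-read;
 *                                // emits no CPU fence instruction
 */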

/**
 * Synchronization fence between threads based on the specified memory order.
 */
static inline void rte_atomic_thread_fence(int memorder);
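
/*
 * The memory order is one of the compiler's __ATOMIC_* constants, e.g.:
 *
 *   rte_atomic_thread_fence(__ATOMIC_RELEASE);
 */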

/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
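
/*
 * Usage sketch (illustrative only): a lock-free read-modify-write built on
 * cmpset, retrying until no other thread updated `*dst` in between.
 * `saturating_inc16` is a hypothetical helper, not a DPDK API.
 *
 *   static inline void
 *   saturating_inc16(volatile uint16_t *dst)
 *   {
 *       uint16_t old;
 *
 *       do {
 *           old = *dst;
 *           if (old == UINT16_MAX)
 *               return;      // already saturated
 *       } while (rte_atomic16_cmpset(dst, old, old + 1) == 0);
 *   }
 */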

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
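
/*
 * Usage sketch (illustrative only): atomically drain a bitmask of pending
 * events posted by other threads. `pending` is a hypothetical variable.
 *
 *   uint16_t events = rte_atomic16_exchange(&pending, 0);
 *   // `events` now holds everything posted so far; `pending` is reset
 *   // in the same atomic step, so no event can be lost or seen twice.
 */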

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif
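
/*
 * Usage sketch (illustrative only): run one-time initialization from
 * whichever thread gets there first. `init_once` and `do_init` are
 * hypothetical names.
 *
 *   static rte_atomic16_t init_once = RTE_ATOMIC16_INIT(0);
 *
 *   if (rte_atomic16_test_and_set(&init_once))
 *       do_init();      // exactly one caller wins and initializes
 */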

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
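
/*
 * Usage sketch (illustrative only): reference counting with the *_return
 * variants, a common pattern for shared objects. `obj` and `obj_free` are
 * hypothetical names.
 *
 *   rte_atomic32_add_return(&obj->refcnt, 1);       // take a reference
 *   ...
 *   if (rte_atomic32_sub_return(&obj->refcnt, 1) == 0)
 *       obj_free(obj);  // last holder releases the object
 */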

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
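
/*
 * Usage sketch (illustrative only): a 64-bit statistics counter shared by
 * several lcores; on 32-bit targets the cmpset-based fallbacks above keep
 * reads and updates atomic. `total_bytes` is a hypothetical name.
 *
 *   static rte_atomic64_t total_bytes = RTE_ATOMIC64_INIT(0);
 *
 *   rte_atomic64_add(&total_bytes, pkt_len);        // datapath lcores
 *   printf("%" PRId64 "\n", rte_atomic64_read(&total_bytes)); // stats lcore
 */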

/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
RTE_STD_C11
typedef struct {
	RTE_STD_C11
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently available for the x86-64 and aarch64
 * platforms.
 *
 * @note The success and failure arguments must be one of the __ATOMIC_* values
 * defined in the C++11 standard. For details on their behavior, refer to the
 * standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be __ATOMIC_RELEASE,
 *   __ATOMIC_ACQ_REL, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);
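
/*
 * Usage sketch (illustrative only): atomically replace a {pointer, tag}
 * pair, a common defense against ABA in lock-free structures. `head` and
 * `next_ptr` are hypothetical names; `head` must be 16-byte aligned.
 *
 *   rte_int128_t old = head;            // non-atomic snapshot is fine:
 *   rte_int128_t new;                   // a stale 'old' just fails the CAS
 *
 *   do {
 *       new.val[0] = (uint64_t)next_ptr;
 *       new.val[1] = old.val[1] + 1;    // bump the version tag
 *   } while (rte_atomic128_cmp_exchange(&head, &old, &new, 0,
 *           __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE) == 0);
 *   // on failure 'old' is refreshed with the current value, so the loop
 *   // recomputes 'new' and retries
 */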

#endif /* __DOXYGEN__ */

#endif /* _RTE_ATOMIC_H_ */