/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores.
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores.
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);
///@}

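/*
 * Usage sketch (illustrative, not part of this header): a minimal
 * single-producer/single-consumer handoff between lcores. The write
 * barrier keeps the data store visible before the flag store; the read
 * barrier keeps the flag load ordered before the data load. The names
 * shared_data/shared_ready and use() are hypothetical.
 *
 *   // producer lcore
 *   shared_data = value;
 *   rte_smp_wmb();                  // publish data before the flag
 *   shared_ready = 1;
 *
 *   // consumer lcore
 *   while (shared_ready == 0)
 *           ;
 *   rte_smp_rmb();                  // see the flag before reading data
 *   use(shared_data);
 */
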
/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to the I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device.
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to the I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device.
 *
 * Guarantees that the LOAD operations on the I/O device that precede the
 * rte_io_rmb() call are visible to the CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define	rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
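
/*
 * Usage sketch (illustrative): keep the compiler from caching or
 * reordering plain memory accesses across a point, e.g. while polling a
 * location written by another context. No CPU fence is emitted.
 * 'status' and BUSY are hypothetical.
 *
 *   while (*status == BUSY) {
 *           rte_compiler_barrier();         // force *status to be re-read
 *   }
 */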

/**
 * Synchronization fence between threads based on the specified memory order.
 *
 * @param memorder
 *   The memory ordering; one of the __ATOMIC_* constants.
 */
static inline void rte_atomic_thread_fence(int memorder);

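/*
 * Usage sketch (illustrative): pair a releasing fence on the writer with
 * an acquiring fence on the reader. 'data' and 'flag' are hypothetical.
 *
 *   // writer
 *   data = 42;
 *   rte_atomic_thread_fence(__ATOMIC_RELEASE);
 *   __atomic_store_n(&flag, 1, __ATOMIC_RELAXED);
 *
 *   // reader
 *   while (__atomic_load_n(&flag, __ATOMIC_RELAXED) == 0)
 *           ;
 *   rte_atomic_thread_fence(__ATOMIC_ACQUIRE);
 *   // 'data' is now guaranteed to read 42
 */
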
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

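/*
 * Usage sketch (illustrative): a lock-free read-modify-write built on
 * cmpset. Retry until no other thread changed the word in between.
 * atomic16_or() is a hypothetical helper, not part of this header.
 *
 *   static inline void
 *   atomic16_or(volatile uint16_t *addr, uint16_t mask)
 *   {
 *           uint16_t old;
 *
 *           do {
 *                   old = *addr;
 *           } while (rte_atomic16_cmpset(addr, old, old | mask) == 0);
 *   }
 */
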
/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

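/*
 * Usage sketch (illustrative): drain a set of pending event bits in one
 * shot. The exchange returns the old contents and leaves zero behind, so
 * no event set between the read and the reset can be lost. 'events' and
 * handle_events() are hypothetical.
 *
 *   uint16_t pending = rte_atomic16_exchange(&events, 0);
 *   if (pending != 0)
 *           handle_events(pending);
 */
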
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

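/*
 * Usage sketch (illustrative): hand out unique sequence numbers; every
 * caller receives a distinct post-increment value. 'next_seq' is a
 * hypothetical counter.
 *
 *   static rte_atomic16_t next_seq = RTE_ATOMIC16_INIT(0);
 *
 *   int16_t seq = rte_atomic16_add_return(&next_seq, 1);
 */
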
/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

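/*
 * Usage sketch (illustrative): reference counting. Every holder takes a
 * reference; the holder that drops the count to zero frees the object.
 * 'obj', its refcnt field and free_object() are hypothetical.
 *
 *   rte_atomic16_inc(&obj->refcnt);                 // take a reference
 *   ...
 *   if (rte_atomic16_dec_and_test(&obj->refcnt))    // drop a reference
 *           free_object(obj);                       // last one frees
 */
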
/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

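/*
 * Usage sketch (illustrative): run one-time initialization exactly once;
 * only the caller that flips the counter from 0 to 1 proceeds. 'once' and
 * do_init() are hypothetical.
 *
 *   static rte_atomic16_t once = RTE_ATOMIC16_INIT(0);
 *
 *   if (rte_atomic16_test_and_set(&once))
 *           do_init();
 */
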
/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

582 
583 /**
584  * Atomically add a 32-bit value to a counter and return the result.
585  *
586  * Atomically adds the 32-bits value (inc) to the atomic counter (v) and
587  * returns the value of v after addition.
588  *
589  * @param v
590  *   A pointer to the atomic counter.
591  * @param inc
592  *   The value to be added to the counter.
593  * @return
594  *   The value of v after the addition.
595  */
596 static inline int32_t
597 rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
598 {
599 	return __sync_add_and_fetch(&v->cnt, inc);
600 }
601 
602 /**
603  * Atomically subtract a 32-bit value from a counter and return
604  * the result.
605  *
606  * Atomically subtracts the 32-bit value (inc) from the atomic counter
607  * (v) and returns the value of v after the subtraction.
608  *
609  * @param v
610  *   A pointer to the atomic counter.
611  * @param dec
612  *   The value to be subtracted from the counter.
613  * @return
614  *   The value of v after the subtraction.
615  */
616 static inline int32_t
617 rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
618 {
619 	return __sync_sub_and_fetch(&v->cnt, dec);
620 }
621 
622 /**
623  * Atomically increment a 32-bit counter by one and test.
624  *
625  * Atomically increments the atomic counter (v) by one and returns true if
626  * the result is 0, or false in all other cases.
627  *
628  * @param v
629  *   A pointer to the atomic counter.
630  * @return
631  *   True if the result after the increment operation is 0; false otherwise.
632  */
633 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);
634 
635 #ifdef RTE_FORCE_INTRINSICS
636 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
637 {
638 	return __sync_add_and_fetch(&v->cnt, 1) == 0;
639 }
640 #endif
641 
642 /**
643  * Atomically decrement a 32-bit counter by one and test.
644  *
645  * Atomically decrements the atomic counter (v) by one and returns true if
646  * the result is 0, or false in all other cases.
647  *
648  * @param v
649  *   A pointer to the atomic counter.
650  * @return
651  *   True if the result after the decrement operation is 0; false otherwise.
652  */
653 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);
654 
655 #ifdef RTE_FORCE_INTRINSICS
656 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
657 {
658 	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
659 }
660 #endif
661 
662 /**
663  * Atomically test and set a 32-bit atomic counter.
664  *
665  * If the counter value is already set, return 0 (failed). Otherwise, set
666  * the counter value to 1 and return 1 (success).
667  *
668  * @param v
669  *   A pointer to the atomic counter.
670  * @return
671  *   0 if failed; else 1, success.
672  */
673 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);
674 
675 #ifdef RTE_FORCE_INTRINSICS
676 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
677 {
678 	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
679 }
680 #endif
681 
682 /**
683  * Atomically set a 32-bit counter to 0.
684  *
685  * @param v
686  *   A pointer to the atomic counter.
687  */
688 static inline void rte_atomic32_clear(rte_atomic32_t *v)
689 {
690 	v->cnt = 0;
691 }
692 
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
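	/*
	 * On 32-bit targets a plain 64-bit access is not single-copy
	 * atomic, so fall back to a compare-and-set retry loop here and
	 * in the helpers below.
	 */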
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 on failure; 1 on success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
RTE_STD_C11
typedef struct {
	RTE_STD_C11
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/**
 * Atomic 128-bit compare and exchange.
 *
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently available for the x86-64 and aarch64
 * platforms.
 *
 * @note The success and failure arguments must be one of the __ATOMIC_* values
 * defined in the C++11 standard. For details on their behavior, refer to the
 * standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be __ATOMIC_RELEASE,
 *   __ATOMIC_ACQ_REL, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

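/*
 * Usage sketch (illustrative): update a 128-bit slot, e.g. a pointer plus
 * an ABA-guard tag, in one shot. On failure 'exp' is refreshed with the
 * current contents, so the loop re-derives the desired value and retries.
 * 'slot', 'new_ptr' and the field layout are hypothetical.
 *
 *   rte_int128_t exp = *slot, want;
 *
 *   do {
 *           want.val[0] = (uintptr_t)new_ptr;
 *           want.val[1] = exp.val[1] + 1;       // bump the tag
 *   } while (rte_atomic128_cmp_exchange(slot, &exp, &want, 0,
 *                                       __ATOMIC_ACQ_REL,
 *                                       __ATOMIC_ACQUIRE) == 0);
 */
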
#endif /* __DOXYGEN__ */

#endif /* _RTE_ATOMIC_H_ */