/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_compat.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics; it was
 *  deprecated in favor of the simpler memory ordering semantics of
 *  rte_atomic_thread_fence().
 *
 *  rte_atomic_thread_fence(__ATOMIC_ACQ_REL) should be used instead.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics; it was
 *  deprecated in favor of the simpler memory ordering semantics of
 *  rte_atomic_thread_fence().
 *
 *  rte_atomic_thread_fence(__ATOMIC_RELEASE) should be used instead.
 *  The fence also guarantees LOAD operations that precede the call
 *  are globally visible across the lcores before the STORE operations
 *  that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics; it was
 *  deprecated in favor of the simpler memory ordering semantics of
 *  rte_atomic_thread_fence().
 *
 *  rte_atomic_thread_fence(__ATOMIC_ACQUIRE) should be used instead.
 *  The fence also guarantees LOAD operations that precede the call
 *  are globally visible across the lcores before the STORE operations
 *  that follow it.
 */
static inline void rte_smp_rmb(void);
///@}

/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to the I/O device or the CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to the I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on the I/O device that precede the
 * rte_io_rmb() call are visible to the CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define	rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
#endif
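
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* names are hypothetical): on a single core, a compiler
 * barrier is enough to keep the non-volatile data store below from
 * being reordered after the flag store observed by, e.g., a signal
 * handler running on the same core.
 */
static inline void
example_publish_to_signal_handler(int *data, volatile int *ready, int value)
{
	*data = value;
	/* Forbid the compiler from reordering the two stores. */
	rte_compiler_barrier();
	*ready = 1;
}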

/**
 * Synchronization fence between threads based on the specified memory order.
 */
static inline void rte_atomic_thread_fence(int memorder);
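
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* names are hypothetical): a release/acquire pairing built
 * with rte_atomic_thread_fence(), the replacement for the deprecated
 * rte_smp_*mb() barriers.
 */
static inline void
example_fence_publish(uint32_t *data, volatile uint32_t *flag, uint32_t value)
{
	*data = value;
	/* Order the data store before the flag store. */
	rte_atomic_thread_fence(__ATOMIC_RELEASE);
	*flag = 1;
}

static inline uint32_t
example_fence_consume(const uint32_t *data, volatile uint32_t *flag)
{
	while (*flag == 0)
		;
	/* Order the flag load before the data load. */
	rte_atomic_thread_fence(__ATOMIC_ACQUIRE);
	return *data;
}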

/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
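
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* name is hypothetical): the usual compare-and-set retry
 * loop, here atomically incrementing a 16-bit counter that saturates
 * at UINT16_MAX. On failure the current value is re-read and the
 * update retried.
 */
static inline void
example_atomic16_saturating_inc(volatile uint16_t *dst)
{
	uint16_t old;

	do {
		old = *dst;
		if (old == UINT16_MAX)
			return;	/* already saturated, nothing to do */
	} while (rte_atomic16_cmpset(dst, old, (uint16_t)(old + 1)) == 0);
}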

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif
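
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* name is hypothetical): exchange reads the old value and
 * stores a new one in a single atomic step, e.g. to drain a 16-bit
 * event mask exactly once even with concurrent producers.
 */
static inline uint16_t
example_atomic16_take_events(volatile uint16_t *events)
{
	/* Grab all pending event bits and clear them atomically. */
	return rte_atomic16_exchange(events, 0);
}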

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST) + inc;
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST) - dec;
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __atomic_fetch_add(&v->cnt, 1, __ATOMIC_SEQ_CST) + 1 == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __atomic_fetch_sub(&v->cnt, 1, __ATOMIC_SEQ_CST) - 1 == 0;
}
#endif
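
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* type and names are hypothetical): the classic
 * reference-counting pattern built on dec-and-test. Only the thread
 * that drops the last reference sees a zero result and frees the
 * object.
 */
struct example_object {
	rte_atomic16_t refcnt;
	/* ... payload ... */
};

static inline void
example_object_put(struct example_object *obj,
		   void (*free_cb)(struct example_object *))
{
	if (rte_atomic16_dec_and_test(&obj->refcnt))
		free_cb(obj);	/* last reference gone */
}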

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
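
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* name is hypothetical): the lifecycle of an rte_atomic16_t
 * counter, from static initialization through updates to a final read.
 */
static inline int16_t
example_atomic16_lifecycle(void)
{
	rte_atomic16_t cnt = RTE_ATOMIC16_INIT(0);

	rte_atomic16_add(&cnt, 10);	/* cnt == 10 */
	rte_atomic16_sub(&cnt, 3);	/* cnt == 7 */
	rte_atomic16_inc(&cnt);		/* cnt == 8 */
	rte_atomic16_dec(&cnt);		/* cnt == 7 */

	return rte_atomic16_read(&cnt);	/* 7 */
}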

/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST) + inc;
}

/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST) - dec;
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __atomic_fetch_add(&v->cnt, 1, __ATOMIC_SEQ_CST) + 1 == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __atomic_fetch_sub(&v->cnt, 1, __ATOMIC_SEQ_CST) - 1 == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
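
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* names are hypothetical): a trivial try-lock built on
 * test-and-set. Only the caller that flips the counter from 0 to 1
 * gets a non-zero return; everyone else backs off.
 */
static inline int
example_try_lock(rte_atomic32_t *lock)
{
	return rte_atomic32_test_and_set(lock);	/* 1 == lock acquired */
}

static inline void
example_unlock(rte_atomic32_t *lock)
{
	rte_atomic32_clear(lock);
}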

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/*
	 * On 32-bit targets a plain 64-bit store is not single-copy
	 * atomic, so emulate the store with a compare-and-set loop.
	 */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST) + inc;
}
#endif
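
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* name is hypothetical): using add_return() to hand out
 * unique, monotonically increasing 64-bit sequence numbers to
 * concurrent callers.
 */
static inline int64_t
example_next_seqno(rte_atomic64_t *seq)
{
	/* Each caller gets a distinct post-increment value. */
	return rte_atomic64_add_return(seq, 1);
}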

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST) - dec;
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
typedef struct {
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently available for the x86-64 and aarch64
 * platforms.
 *
 * @note The success and failure arguments must be one of the __ATOMIC_* values
 * defined in the C++11 standard. For details on their behavior, refer to the
 * standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be __ATOMIC_RELEASE,
 *   __ATOMIC_ACQ_REL, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);
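
/*
 * Usage sketch (illustrative only, not part of the DPDK API; the
 * example_* name is hypothetical, and the function is assumed to be
 * built on an x86-64 or aarch64 target where rte_atomic128_cmp_exchange()
 * is implemented): atomically replace a 128-bit {pointer, tag} pair. On
 * failure 'exp' is refreshed with the observed value, so the loop simply
 * retries with updated expectations.
 */
static inline void
example_atomic128_set_pair(rte_int128_t *dst, uint64_t ptr, uint64_t tag)
{
	rte_int128_t exp = *dst;	/* may be stale; fixed up on failure */
	rte_int128_t src;

	src.val[0] = ptr;
	src.val[1] = tag;

	while (rte_atomic128_cmp_exchange(dst, &exp, &src, 1,
			__ATOMIC_RELEASE, __ATOMIC_RELAXED) == 0)
		;
}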

#endif /* __DOXYGEN__ */

#endif /* _RTE_ATOMIC_H_ */