/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>

#include <rte_common.h>
#include <rte_stdatomic.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifdef __DOXYGEN__

/** @name Memory Barrier
 */
///@{
/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);
///@}

/** @name SMP Memory Barrier
 */
///@{
/**
 * General memory barrier between lcores
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics. It is
 *  deprecated in favor of the simpler memory ordering semantics of
 *  rte_atomic_thread_fence().
 *
 *  rte_atomic_thread_fence(rte_memory_order_acq_rel) should be used instead.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics. It is
 *  deprecated in favor of the simpler memory ordering semantics of
 *  rte_atomic_thread_fence().
 *
 *  rte_atomic_thread_fence(rte_memory_order_release) should be used instead.
 *  The fence also guarantees LOAD operations that precede the call
 *  are globally visible across the lcores before the STORE operations
 *  that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 *
 * @note
 *  This function is deprecated.
 *  It provides a synchronization primitive similar to an atomic fence,
 *  but with different syntax and memory ordering semantics. It is
 *  deprecated in favor of the simpler memory ordering semantics of
 *  rte_atomic_thread_fence().
 *
 *  rte_atomic_thread_fence(rte_memory_order_acquire) should be used instead.
 *  The fence also guarantees LOAD operations that precede the call
 *  are globally visible across the lcores before the STORE operations
 *  that follow it.
 */
static inline void rte_smp_rmb(void);
///@}

/** @name I/O Memory Barrier
 */
///@{
/**
 * General memory barrier for I/O device
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to the I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to the I/O device before the STORE
 * operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for I/O device
 *
 * Guarantees that the LOAD operations on the I/O device that precede the
 * rte_io_rmb() call are visible to the CPU before the LOAD
 * operations that follow it.
 */
static inline void rte_io_rmb(void);
///@}

#endif /* __DOXYGEN__ */

/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while (0)
#endif

/**
 * Synchronization fence between threads based on the specified memory order.
 */
static inline void rte_atomic_thread_fence(rte_memory_order memorder);

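/*
 * Illustrative sketch (not part of this API): replacing the deprecated SMP
 * barriers with rte_atomic_thread_fence(), as recommended in the notes above.
 * The 'data' and 'ready' variables are hypothetical.
 *
 *	static RTE_ATOMIC(uint32_t) ready;
 *	static uint32_t data;
 *
 *	// Producer lcore: publish 'data', then signal 'ready'.
 *	static void producer(void)
 *	{
 *		data = 42;
 *		// Was: rte_smp_wmb();
 *		rte_atomic_thread_fence(rte_memory_order_release);
 *		rte_atomic_store_explicit(&ready, 1, rte_memory_order_relaxed);
 *	}
 *
 *	// Consumer lcore: wait for 'ready', then read 'data'.
 *	static void consumer(void)
 *	{
 *		while (rte_atomic_load_explicit(&ready, rte_memory_order_relaxed) == 0)
 *			;
 *		// Was: rte_smp_rmb();
 *		rte_atomic_thread_fence(rte_memory_order_acquire);
 *		// 'data' is now guaranteed to read 42.
 *	}
 */
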
/*------------------------- 16 bit atomic operations -------------------------*/

#ifndef RTE_TOOLCHAIN_MSVC

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

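/*
 * Illustrative sketch (not part of this API): a typical compare-and-set retry
 * loop built on rte_atomic16_cmpset(). The 'credits' variable and the
 * saturation behavior are hypothetical.
 *
 *	static volatile uint16_t credits;
 *
 *	// Add 'n' credits, saturating at UINT16_MAX, without locking.
 *	static void add_credits(uint16_t n)
 *	{
 *		uint16_t old_val, new_val;
 *
 *		do {
 *			old_val = credits;
 *			new_val = (uint16_t)((UINT16_MAX - old_val < n) ?
 *					UINT16_MAX : old_val + n);
 *			// cmpset() fails (returns 0) if another lcore changed
 *			// 'credits' in the meantime; recompute and retry.
 *		} while (rte_atomic16_cmpset(&credits, old_val, new_val) == 0);
 *	}
 */
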
/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint16_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

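/*
 * Illustrative sketch (not part of this API): using rte_atomic16_exchange()
 * to atomically grab and clear a pending-event bit mask. The 'pending'
 * variable and the event bits are hypothetical.
 *
 *	static volatile uint16_t pending;
 *
 *	static void handle_events(void)
 *	{
 *		// Take ownership of all currently pending bits in one shot;
 *		// bits set concurrently by producers are not lost, they are
 *		// simply picked up by the next call.
 *		uint16_t events = rte_atomic16_exchange(&pending, 0);
 *
 *		if (events & 0x1)
 *			; // handle event 0
 *		if (events & 0x2)
 *			; // handle event 1
 *	}
 */
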
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

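/*
 * Illustrative sketch (not part of this API): declaring and initializing a
 * 16-bit atomic counter, either statically or at run time. The counter name
 * is hypothetical.
 *
 *	// Static initialization.
 *	static rte_atomic16_t nb_ports = RTE_ATOMIC16_INIT(0);
 *
 *	static void setup(void)
 *	{
 *		// Equivalent run-time initialization.
 *		rte_atomic16_init(&nb_ports);
 *
 *		rte_atomic16_set(&nb_ports, 4);
 *		if (rte_atomic16_read(&nb_ports) != 4)
 *			; // cannot happen in a single-threaded setup phase
 *	}
 */
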
/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * Atomically adds the 16-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

/**
 * Atomically subtract a 16-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 16-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

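/*
 * Illustrative sketch (not part of this API): using test_and_set() and
 * clear() as a simple "claim once" flag, e.g. to ensure a one-time setup
 * routine runs on a single lcore. The names, including do_setup(), are
 * hypothetical.
 *
 *	static rte_atomic16_t setup_claimed = RTE_ATOMIC16_INIT(0);
 *
 *	static void maybe_do_setup(void)
 *	{
 *		if (rte_atomic16_test_and_set(&setup_claimed)) {
 *			do_setup();	// only the first caller gets here
 *			// rte_atomic16_clear(&setup_claimed) would re-arm the flag
 *		}
 *	}
 */
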
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint32_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * Atomically adds the 32-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

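/*
 * Illustrative sketch (not part of this API): using rte_atomic32_add_return()
 * to hand out unique, monotonically increasing slot indices to multiple
 * lcores. The 'next_slot' counter is hypothetical.
 *
 *	static rte_atomic32_t next_slot = RTE_ATOMIC32_INIT(0);
 *
 *	static int32_t alloc_slot(void)
 *	{
 *		// add_return() gives each caller a distinct post-increment
 *		// value, so two lcores can never obtain the same slot.
 *		return rte_atomic32_add_return(&next_slot, 1) - 1;
 *	}
 */
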
/**
 * Atomically subtract a 32-bit value from a counter and return
 * the result.
 *
 * Atomically subtracts the 32-bit value (dec) from the atomic counter
 * (v) and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif

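/*
 * Illustrative sketch (not part of this API): reference counting a shared
 * object with rte_atomic32_inc() / rte_atomic32_dec_and_test(). The
 * 'struct session' type and its session_free() routine are hypothetical.
 *
 *	struct session {
 *		rte_atomic32_t refcnt;
 *		// ... payload ...
 *	};
 *
 *	static void session_get(struct session *s)
 *	{
 *		rte_atomic32_inc(&s->refcnt);
 *	}
 *
 *	static void session_put(struct session *s)
 *	{
 *		// The last holder (counter reaches 0) releases the object.
 *		if (rte_atomic32_dec_and_test(&s->refcnt))
 *			session_free(s);
 *	}
 */
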
/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint64_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt;  /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

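/*
 * Illustrative sketch (not part of this API): a 64-bit byte counter updated
 * from datapath lcores and sampled from a control thread. On 32-bit targets
 * read() and set() fall back to the cmpset loops above, so the sampled value
 * is never a torn 64-bit read. The names are hypothetical.
 *
 *	static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);
 *
 *	static void on_rx(uint32_t pkt_len)	// datapath lcore
 *	{
 *		rte_atomic64_add(&rx_bytes, pkt_len);
 *	}
 *
 *	static int64_t sample_and_reset(void)	// control thread
 *	{
 *		int64_t bytes = rte_atomic64_read(&rx_bytes);
 *
 *		// Note: updates arriving between the read and the set are lost;
 *		// a cmpset loop would be needed to make the sample-and-reset
 *		// itself atomic.
 *		rte_atomic64_set(&rx_bytes, 0);
 *		return bytes;
 *	}
 */
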
/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * Atomically adds the 64-bit value (inc) to the atomic counter (v) and
 * returns the value of v after the addition.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * Atomically subtracts the 64-bit value (dec) from the atomic counter (v)
 * and returns the value of v after the subtraction.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * Atomically increments the atomic counter (v) by one and returns
 * true if the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the addition is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * Atomically decrements the atomic counter (v) by one and returns true if
 * the result is 0, or false in all other cases.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after subtraction is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

#endif /* !RTE_TOOLCHAIN_MSVC */

/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure.
 */
typedef struct __rte_aligned(16) {
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
#ifndef RTE_TOOLCHAIN_MSVC
		__extension__ __int128 int128;
#endif
#endif
	};
} rte_int128_t;

#ifdef __DOXYGEN__

/**
 * An atomic compare and set function used by the mutex functions.
 * (Atomically) Equivalent to:
 * @code
 *   if (*dst == *exp)
 *     *dst = *src
 *   else
 *     *exp = *dst
 * @endcode
 *
 * @note This function is currently available for the x86-64 and aarch64
 * platforms.
 *
 * @note The success and failure arguments must be one of the
 * rte_memory_order_* values, which map to the memory orders defined in the
 * C11/C++11 standards. For details on their behavior, refer to the standard.
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   Pointer to the expected value. If the operation fails, this memory is
 *   updated with the actual value.
 * @param src
 *   Pointer to the new value.
 * @param weak
 *   A value of true allows the comparison to spuriously fail and allows the
 *   'exp' update to occur non-atomically (i.e. a torn read may occur).
 *   Implementations may ignore this argument and only implement the strong
 *   variant.
 * @param success
 *   If successful, the operation's memory behavior conforms to this (or a
 *   stronger) model.
 * @param failure
 *   If unsuccessful, the operation's memory behavior conforms to this (or a
 *   stronger) model. This argument cannot be rte_memory_order_release,
 *   rte_memory_order_acq_rel, or a stronger model than success.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */

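/*
 * Illustrative sketch (not part of this API): updating a pointer together
 * with a generation tag in a single 128-bit compare-and-swap, a common way
 * to avoid ABA problems on 64-bit targets. The 'tagged_head' layout, the
 * push() routine and the chosen memory orders are hypothetical.
 *
 *	static rte_int128_t tagged_head;	// val[0] = pointer, val[1] = tag
 *
 *	static void push(void *node)
 *	{
 *		rte_int128_t expected, desired;
 *
 *		expected = tagged_head;	// snapshot (may be torn; corrected on
 *					// the first failed cmp_exchange)
 *		do {
 *			desired.val[0] = (uintptr_t)node;
 *			desired.val[1] = expected.val[1] + 1;	// bump the tag
 *		} while (rte_atomic128_cmp_exchange(&tagged_head, &expected,
 *				&desired, 0,
 *				rte_memory_order_acq_rel,
 *				rte_memory_order_relaxed) == 0);
 *		// On failure 'expected' is refreshed with the current value,
 *		// so the loop simply retries with up-to-date contents.
 *	}
 */
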
#ifdef __cplusplus
}
#endif

#endif /* _RTE_ATOMIC_H_ */