/* SPDX-License-Identifier: BSD-3-Clause
 *
 * Copyright (c) 2010-2017 Intel Corporation
 * Copyright (c) 2007-2009 Kip Macy kmacy@freebsd.org
 * All rights reserved.
 * Derived from FreeBSD's bufring.h
 * Used as BSD-3 Licensed with permission from Kip Macy.
 */

#ifndef _RTE_RING_GENERIC_PVT_H_
#define _RTE_RING_GENERIC_PVT_H_

/**
 * @file rte_ring_generic_pvt.h
 * It is not recommended to include this file directly;
 * include <rte_ring.h> instead.
 * Contains internal helper functions for MP/SP and MC/SC ring modes.
 * For more information please refer to <rte_ring.h>.
 */

/**
 * @internal This function updates the tail value of the given headtail
 * structure.
 */
static __rte_always_inline void
__rte_ring_update_tail(struct rte_ring_headtail *ht, uint32_t old_val,
		uint32_t new_val, uint32_t single, uint32_t enqueue)
{
	if (enqueue)
		rte_smp_wmb();
	else
		rte_smp_rmb();
	/*
	 * If there are other enqueues/dequeues in progress that preceded us,
	 * we need to wait for them to complete
	 */
	if (!single)
		rte_wait_until_equal_32((volatile uint32_t *)(uintptr_t)&ht->tail, old_val,
			rte_memory_order_relaxed);

	ht->tail = new_val;
}
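
/*
 * Usage sketch (illustrative only, not part of the ring API): finishing a
 * dequeue. After the consumer has copied out the objects between 'old_head'
 * and 'new_head', it publishes its progress by advancing cons->tail. The
 * helper name below is hypothetical; the real callers live in the other
 * private ring headers.
 */
static __rte_always_inline void
__example_dequeue_finish(struct rte_ring_headtail *cons,
		uint32_t old_head, uint32_t new_head, unsigned int is_sc)
{
	/*
	 * enqueue == 0 selects the read barrier; with multiple consumers
	 * (is_sc == 0) the call also waits until cons->tail reaches
	 * 'old_head', i.e. until all preceding dequeues have completed.
	 */
	__rte_ring_update_tail(cons, old_head, new_head, is_sc, 0);
}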

/**
 * @internal This is a helper function that moves the producer/consumer head.
 *
 * @param d
 *   A pointer to the headtail structure with the head value to be moved.
 * @param s
 *   A pointer to the counterpart headtail structure. Note that this
 *   function only reads the tail value from it.
 * @param capacity
 *   Either the ring capacity value (for the producer) or zero (for the
 *   consumer).
 * @param is_st
 *   Non-zero if only a single thread updates this head (no CAS needed),
 *   zero if the multi-thread safe path is required.
 * @param n
 *   The number of elements to move the head value by.
 * @param behavior
 *   RTE_RING_QUEUE_FIXED:    Move by a fixed number of items.
 *   RTE_RING_QUEUE_VARIABLE: Move by as many items as possible.
 * @param old_head
 *   Returns the head value as it was before the move.
 * @param new_head
 *   Returns the new head value.
 * @param entries
 *   Returns the number of ring entries available BEFORE the head was moved.
 * @return
 *   Actual number of objects the head was moved by.
 *   If behavior == RTE_RING_QUEUE_FIXED, this will be either 0 or n.
 */
static __rte_always_inline unsigned int
__rte_ring_headtail_move_head(struct rte_ring_headtail *d,
		const struct rte_ring_headtail *s, uint32_t capacity,
		unsigned int is_st, unsigned int n,
		enum rte_ring_queue_behavior behavior,
		uint32_t *old_head, uint32_t *new_head, uint32_t *entries)
{
	unsigned int max = n;
	int success;

	do {
		/* Reset n to the initial burst count */
		n = max;

		*old_head = d->head;

		/* Add an rmb barrier to avoid load/load reordering on weak
		 * memory models. It is a no-op on x86.
		 */
		rte_smp_rmb();

		/*
		 * The subtraction is done between two unsigned 32-bit values
		 * (the result is always modulo 32 bits even if we have
		 * *old_head > s->tail). So 'entries' is always between 0
		 * and capacity (which is < size).
		 */
		*entries = (capacity + s->tail - *old_head);

		/* check that we have enough room in the ring */
		if (unlikely(n > *entries))
			n = (behavior == RTE_RING_QUEUE_FIXED) ?
					0 : *entries;

		if (n == 0)
			return 0;

		*new_head = *old_head + n;
		if (is_st) {
			d->head = *new_head;
			success = 1;
		} else
			success = rte_atomic32_cmpset(
					(uint32_t *)(uintptr_t)&d->head,
					*old_head, *new_head);
	} while (unlikely(success == 0));
	return n;
}
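
/*
 * Usage sketch (illustrative only, not part of the ring API): a typical
 * multi-producer enqueue first reserves space by moving the producer head,
 * then copies the objects into the reserved slots, and finally publishes
 * them by advancing the producer tail. The function name below is
 * hypothetical; the real callers live in the other private ring headers.
 */
static __rte_always_inline unsigned int
__example_mp_enqueue(struct rte_ring_headtail *prod,
		const struct rte_ring_headtail *cons, uint32_t capacity,
		unsigned int n)
{
	uint32_t old_head, new_head, free_entries;

	/* Reserve up to n slots; returns 0 if the ring is full. */
	n = __rte_ring_headtail_move_head(prod, cons, capacity,
			0 /* multi-producer */, n, RTE_RING_QUEUE_VARIABLE,
			&old_head, &new_head, &free_entries);
	if (n == 0)
		return 0;

	/* ... copy n objects into slots [old_head, new_head) here ... */

	/* Publish: wait for earlier producers, then advance prod->tail. */
	__rte_ring_update_tail(prod, old_head, new_head, 0, 1);
	return n;
}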

#endif /* _RTE_RING_GENERIC_PVT_H_ */