/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2019 Intel Corporation
 */

#ifndef _RTE_STACK_LF_GENERIC_H_
#define _RTE_STACK_LF_GENERIC_H_

#include <rte_branch_prediction.h>
#include <rte_prefetch.h>

static __rte_always_inline unsigned int
__rte_stack_lf_count(struct rte_stack *s)
{
	/* stack_lf_push() and stack_lf_pop() do not update the list's contents
	 * and stack_lf->len atomically, which can cause the list to appear
	 * shorter than it actually is if this function is called while other
	 * threads are modifying the list.
	 *
	 * However, given the inherently approximate nature of the get_count
	 * callback -- even if the list and its size were updated atomically,
	 * the size could change between when get_count executes and when the
	 * value is returned to the caller -- this is acceptable.
	 *
	 * The stack_lf->len updates are placed such that the list may appear to
	 * have fewer elements than it does, but will never appear to have more
	 * elements. If the mempool is near-empty to the point that this is a
	 * concern, the user should consider increasing the mempool size.
	 */
	/* NOTE: review for potential ordering optimization */
	return rte_atomic_load_explicit(&s->stack_lf.used.len, rte_memory_order_seq_cst);
}
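
/*
 * Editorial usage sketch (not part of the original file): because the count
 * above is inherently stale, callers should treat it as a hint only -- e.g.
 * to size a speculative bulk pop through the public API (rte_stack.h) -- and
 * must still handle failure:
 *
 *	void *objs[32];
 *	unsigned int n = RTE_MIN(rte_stack_count(s), 32u);
 *
 *	if (n > 0 && rte_stack_pop(s, objs, n) == 0) {
 *		;	// another thread drained the stack after the count
 *	}
 */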

static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
			  struct rte_stack_lf_elem *first,
			  struct rte_stack_lf_elem *last,
			  unsigned int num)
{
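	/* Editorial note: struct rte_stack_lf_head (defined in rte_stack.h)
	 * packs the top pointer and a 64-bit modification counter into one
	 * 16-byte value, so both are updated by a single
	 * rte_atomic128_cmp_exchange() below.
	 */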
	struct rte_stack_lf_head old_head;
	int success;

	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;

		/* An acquire fence (or stronger) is needed for weak memory
		 * models to establish a synchronized-with relationship between
		 * the list->head load and store-release operations (as part of
		 * the rte_atomic128_cmp_exchange()).
		 */
		rte_smp_mb();

		/* Swing the top pointer to the first element in the list and
		 * make the last element point to the old top.
		 */
		new_head.top = first;
		new_head.cnt = old_head.cnt + 1;
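		/* Editorial note: cnt increments on every successful head
		 * update, so a stale old_head cannot match the 128-bit
		 * compare even if the same element address reappears at the
		 * top (ABA prevention).
		 */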

		last->next = old_head.top;

		/* old_head is updated on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, rte_memory_order_release,
				rte_memory_order_relaxed);
	} while (success == 0);
	/* NOTE: review for potential ordering optimization */
	rte_atomic_fetch_add_explicit(&list->len, num, rte_memory_order_seq_cst);
}
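
/*
 * Editorial usage sketch (hypothetical caller; the real one is
 * __rte_stack_lf_push() in rte_stack_lf.h): the caller chains num elements
 * together and hands both endpoints to this function; last->next is then
 * rewritten above to splice the chain onto the old top:
 *
 *	struct rte_stack_lf_elem *e0, *e1, *e2;	// from the free list
 *
 *	e0->next = e1;
 *	e1->next = e2;
 *
 *	__rte_stack_lf_push_elems(&s->stack_lf.used, e0, e2, 3);
 */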

static __rte_always_inline struct rte_stack_lf_elem *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
			 unsigned int num,
			 void **obj_table,
			 struct rte_stack_lf_elem **last)
{
	struct rte_stack_lf_head old_head;
	int success = 0;

	/* Reserve num elements, if available */
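	/* Editorial note: list->len is decremented here, before the head is
	 * touched, and push only increments it after its head CAS, so the sum
	 * of in-flight reservations can never exceed the number of elements
	 * actually linked into the list.
	 */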
	while (1) {
		/* NOTE: review for potential ordering optimization */
		uint64_t len = rte_atomic_load_explicit(&list->len, rte_memory_order_seq_cst);

		/* Does the list contain enough elements? */
		if (unlikely(len < num))
			return NULL;

		/* NOTE: review for potential ordering optimization */
		if (rte_atomic_compare_exchange_strong_explicit(&list->len, &len, len - num,
				rte_memory_order_seq_cst, rte_memory_order_seq_cst))
			break;
	}

	old_head = list->head;

	/* Pop num elements */
	do {
		struct rte_stack_lf_head new_head;
		struct rte_stack_lf_elem *tmp;
		unsigned int i;

		/* An acquire fence (or stronger) is needed for weak memory
		 * models to ensure the LF LIFO element reads are properly
		 * ordered with respect to the head pointer read.
		 */
		rte_smp_mb();

		rte_prefetch0(old_head.top);

		tmp = old_head.top;

		/* Traverse the list to find the new head. A next pointer will
		 * either point to another element or NULL; if a thread
		 * encounters a pointer that has already been popped, the CAS
		 * will fail.
		 */
		for (i = 0; i < num && tmp != NULL; i++) {
			rte_prefetch0(tmp->next);
			if (obj_table)
				obj_table[i] = tmp->data;
			if (last)
				*last = tmp;
			tmp = tmp->next;
		}

		/* If NULL was encountered, the list was modified while
		 * traversing it. Retry.
		 */
		if (i != num) {
			old_head = list->head;
			continue;
		}

		new_head.top = tmp;
		new_head.cnt = old_head.cnt + 1;

		/* old_head is updated on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, rte_memory_order_release,
				rte_memory_order_relaxed);
	} while (success == 0);

	return old_head.top;
}
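
/*
 * Editorial usage sketch (hypothetical caller; the real one is
 * __rte_stack_lf_pop() in rte_stack_lf.h): a pop detaches a chain of num
 * elements from the used list, and the caller recycles that chain onto the
 * free list:
 *
 *	struct rte_stack_lf_elem *first, *last = NULL;
 *
 *	first = __rte_stack_lf_pop_elems(&s->stack_lf.used, n, obj_table, &last);
 *	if (first == NULL)
 *		return 0;	// fewer than n elements were available
 *
 *	__rte_stack_lf_push_elems(&s->stack_lf.free, first, last, n);
 */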

#endif /* _RTE_STACK_LF_GENERIC_H_ */