/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2020 Intel Corporation
 */

#include "test_ring_stress_impl.h"
#include <rte_ring_elem.h>

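/*
 * Stress test for the ring "peek" (start/finish) API on a
 * single-producer/single-consumer ring. The shared stress-test harness
 * is pulled in through test_ring_stress_impl.h; this file supplies the
 * enqueue/dequeue/init helpers that the harness expects.
 */

/*
 * Dequeue via the peek API: rte_ring_dequeue_bulk_start() reserves
 * objects on the ring, rte_ring_dequeue_finish() commits their removal.
 * With bulk semantics either all n objects are reserved or none, so on
 * a failed reservation the call finishes with 0 and nothing is
 * consumed. The peek API is not MT-safe on a plain SP/SC ring, so a
 * static spinlock serializes the stress-test worker lcores.
 */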
static inline uint32_t
_st_ring_dequeue_bulk(struct rte_ring *r, void **obj, uint32_t n,
	uint32_t *avail)
{
	uint32_t m;

	static rte_spinlock_t lck = RTE_SPINLOCK_INITIALIZER;

	rte_spinlock_lock(&lck);

	m = rte_ring_dequeue_bulk_start(r, obj, n, avail);
	n = (m == n) ? n : 0;
	rte_ring_dequeue_finish(r, n);

	rte_spinlock_unlock(&lck);
	return n;
}

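/*
 * Enqueue via the peek API: rte_ring_enqueue_bulk_start() reserves room
 * for n objects, rte_ring_enqueue_finish() copies them in and makes
 * them visible to the consumer (0 objects on a failed reservation).
 * A static spinlock serializes the callers here as well.
 */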
static inline uint32_t
_st_ring_enqueue_bulk(struct rte_ring *r, void * const *obj, uint32_t n,
	uint32_t *free)
{
	uint32_t m;

	static rte_spinlock_t lck = RTE_SPINLOCK_INITIALIZER;

	rte_spinlock_lock(&lck);

	m = rte_ring_enqueue_bulk_start(r, n, free);
	n = (m == n) ? n : 0;
	rte_ring_enqueue_finish(r, obj, n);

	rte_spinlock_unlock(&lck);
	return n;
}

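/*
 * Create the ring in single-producer/single-consumer mode, one of the
 * synchronization modes the peek API supports.
 */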
static int
_st_ring_init(struct rte_ring *r, const char *name, uint32_t num)
{
	return rte_ring_init(r, name, num, RING_F_SP_ENQ | RING_F_SC_DEQ);
}

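/*
 * Test registration: the case table 'tests' and 'struct test' come from
 * the stress-test framework headers included above.
 */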
const struct test test_ring_st_peek_stress = {
	.name = "ST_PEEK",
	.nb_case = RTE_DIM(tests),
	.cases = tests,
};