/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2024 Huawei Technologies Co., Ltd
 */

#include <string.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <inttypes.h>
#include <errno.h>
#include <sys/queue.h>

#include <rte_common.h>
#include <rte_log.h>
#include <rte_memory.h>
#include <rte_launch.h>
#include <rte_cycles.h>
#include <rte_eal.h>
#include <rte_per_lcore.h>
#include <rte_lcore.h>
#include <rte_branch_prediction.h>
#include <rte_malloc.h>
#include <rte_random.h>
#include <rte_errno.h>
#include <rte_hexdump.h>

#include <rte_soring.h>

#include "test.h"

#define MAX_ACQUIRED 20

#define SORING_TEST_ASSERT(val, expected) do { \
	RTE_TEST_ASSERT(expected == val, \
			"%s: expected %u got %u\n", #val, expected, val); \
} while (0)

static void
set_soring_init_param(struct rte_soring_param *prm,
		const char *name, uint32_t esize, uint32_t elems,
		uint32_t stages, uint32_t stsize,
		enum rte_ring_sync_type rst_prod,
		enum rte_ring_sync_type rst_cons)
{
	prm->name = name;
	prm->elem_size = esize;
	prm->elems = elems;
	prm->stages = stages;
	prm->meta_size = stsize;
	prm->prod_synt = rst_prod;
	prm->cons_synt = rst_cons;
}

static int
move_forward_stage(struct rte_soring *sor,
		uint32_t num_packets, uint32_t stage)
{
	uint32_t acquired;
	uint32_t ftoken;
	uint32_t *acquired_objs[MAX_ACQUIRED];

	acquired = rte_soring_acquire_bulk(sor, acquired_objs, stage,
			num_packets, &ftoken, NULL);
	SORING_TEST_ASSERT(acquired, num_packets);
	rte_soring_release(sor, NULL, stage, num_packets,
			ftoken);

	return 0;
}
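
/*
 * Illustrative sketch only (not wired into the autotest below): the
 * single-stage lifecycle that move_forward_stage() takes part in.
 * It assumes 'sor' was initialized with elem_size == sizeof(uint32_t),
 * a single stage and enough free space; the function name and element
 * values are made up for illustration.
 */
static __rte_unused int
example_stage_pass(struct rte_soring *sor)
{
	uint32_t objs[4] = {1, 2, 3, 4};
	uint32_t deq[4];
	uint32_t n;

	/* producer: make the elements visible to stage 0 */
	n = rte_soring_enqueue_burst(sor, objs, RTE_DIM(objs), NULL);
	if (n != RTE_DIM(objs))
		return -ENOSPC;

	/* stage-0 worker: acquire the elements, then release them */
	move_forward_stage(sor, n, 0);

	/* consumer: elements become dequeuable only after the last
	 * stage has released them
	 */
	n = rte_soring_dequeue_burst(sor, deq, RTE_DIM(deq), NULL);
	return (n == RTE_DIM(deq)) ? 0 : -EIO;
}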

/*
 * struct rte_soring_param parameter checking.
 */
static int
test_soring_init(void)
{
	struct rte_soring *sor = NULL;
	struct rte_soring_param prm;
	int rc;
	size_t sz;
	memset(&prm, 0, sizeof(prm));

	/* init memory */
	set_soring_init_param(&prm, "alloc_memory", sizeof(uintptr_t),
			4, 1, 4, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	sz = rte_soring_get_memsize(&prm);
	sor = rte_zmalloc(NULL, sz, RTE_CACHE_LINE_SIZE);
	RTE_TEST_ASSERT_NOT_NULL(sor, "could not allocate memory for soring");

	set_soring_init_param(&prm, "test_invalid_stages", sizeof(uintptr_t),
			4, 0, 4, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	rc = rte_soring_init(sor, &prm);
	RTE_TEST_ASSERT_FAIL(rc, "initted soring with invalid num stages");

	set_soring_init_param(&prm, "test_invalid_esize", 0,
			4, 1, 4, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	rc = rte_soring_init(sor, &prm);
	RTE_TEST_ASSERT_FAIL(rc, "initted soring with 0 esize");

	set_soring_init_param(&prm, "test_invalid_esize", 9,
			4, 1, 4, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	rc = rte_soring_init(sor, &prm);
	RTE_TEST_ASSERT_FAIL(rc, "initted soring with esize not multiple of 4");

	set_soring_init_param(&prm, "test_invalid_rsize", sizeof(uintptr_t),
			4, 1, 3, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	rc = rte_soring_init(sor, &prm);
	RTE_TEST_ASSERT_FAIL(rc, "initted soring with rcsize not multiple of 4");

	set_soring_init_param(&prm, "test_invalid_elems", sizeof(uintptr_t),
			RTE_SORING_ELEM_MAX + 1, 1, 4, RTE_RING_SYNC_MT,
			RTE_RING_SYNC_MT);
	rc = rte_soring_init(sor, &prm);
	RTE_TEST_ASSERT_FAIL(rc, "initted soring with invalid num elements");

	rte_free(sor);
	return 0;
}
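
/*
 * For contrast with the negative cases above, a minimal sketch of the
 * intended setup sequence: compute the footprint, allocate it, init the
 * ring, free it. The function name and parameter values are illustrative
 * assumptions; the autotest below does not call it.
 */
static __rte_unused int
example_valid_setup(void)
{
	struct rte_soring *sor;
	struct rte_soring_param prm;
	ssize_t sz;
	int rc;

	memset(&prm, 0, sizeof(prm));
	/* one stage, 4 pointer-sized elements, 4 bytes of metadata each */
	set_soring_init_param(&prm, "valid_setup", sizeof(uintptr_t),
			4, 1, 4, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);

	sz = rte_soring_get_memsize(&prm);
	if (sz < 0)
		return (int)sz;

	sor = rte_zmalloc(NULL, sz, RTE_CACHE_LINE_SIZE);
	if (sor == NULL)
		return -ENOMEM;

	rc = rte_soring_init(sor, &prm);
	rte_free(sor);
	return rc;
}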

static int
test_soring_get_memsize(void)
{

	struct rte_soring_param prm;
	ssize_t sz;

	set_soring_init_param(&prm, "memsize", sizeof(uint32_t *),
			10, 1, 0, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	sz = rte_soring_get_memsize(&prm);
	RTE_TEST_ASSERT(sz > 0, "failed to calculate size");

	set_soring_init_param(&prm, "memsize", sizeof(uint8_t),
			16, UINT32_MAX, sizeof(uint32_t), RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	sz = rte_soring_get_memsize(&prm);
	RTE_TEST_ASSERT_EQUAL(sz, -EINVAL, "calculated size incorrect");

	set_soring_init_param(&prm, "memsize", 0,
			16, UINT32_MAX, sizeof(uint32_t), RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	sz = rte_soring_get_memsize(&prm);
	RTE_TEST_ASSERT_EQUAL(sz, -EINVAL, "calculated size incorrect");

	return 0;

}

static int
test_soring_stages(void)
{
	struct rte_soring *sor = NULL;
	struct rte_soring_param prm;
	uint32_t objs[32];
	uint32_t rcs[32];
	uint32_t acquired_objs[32];
	uint32_t acquired_rcs[32];
	uint32_t dequeued_rcs[32];
	uint32_t dequeued_objs[32];
	ssize_t ssz;
	uint32_t stage, enqueued, dequeued, acquired;
	uint32_t i, ftoken;

	memset(&prm, 0, sizeof(prm));
	set_soring_init_param(&prm, "stages", sizeof(uint32_t), 32,
			10000, sizeof(uint32_t), RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	ssz = rte_soring_get_memsize(&prm);
	RTE_TEST_ASSERT(ssz > 0, "parameter error calculating ring size");
	sor = rte_zmalloc(NULL, ssz, RTE_CACHE_LINE_SIZE);
	RTE_TEST_ASSERT_NOT_NULL(sor, "couldn't allocate memory for soring");
	RTE_TEST_ASSERT_SUCCESS(rte_soring_init(sor, &prm),
			"failed to init soring");

	for (i = 0; i < 32; i++) {
		rcs[i] = i;
		objs[i] = i + 32;
	}

	enqueued = rte_soring_enqueux_burst(sor, objs, rcs, 32, NULL);
	SORING_TEST_ASSERT(enqueued, 32);

	for (stage = 0; stage < 10000; stage++) {
		int j;
		dequeued = rte_soring_dequeue_bulk(sor, dequeued_objs,
				32, NULL);
		/* nothing can be dequeued until the final stage completes */
		SORING_TEST_ASSERT(dequeued, 0);

		acquired = rte_soring_acquirx_bulk(sor, acquired_objs,
				acquired_rcs, stage, 32, &ftoken, NULL);
		SORING_TEST_ASSERT(acquired, 32);

		for (j = 0; j < 32; j++) {
			SORING_TEST_ASSERT(acquired_rcs[j], j + stage);
			SORING_TEST_ASSERT(acquired_objs[j], j + stage + 32);
			/* modify both queue object and rc */
			acquired_objs[j]++;
			acquired_rcs[j]++;
		}

		rte_soring_releasx(sor, acquired_objs,
				acquired_rcs, stage, 32,
				ftoken);
	}

	dequeued = rte_soring_dequeux_bulk(sor, dequeued_objs, dequeued_rcs,
			32, NULL);
	SORING_TEST_ASSERT(dequeued, 32);
	for (i = 0; i < 32; i++) {
		/* ensure we ended up with the expected values in order */
		SORING_TEST_ASSERT(dequeued_rcs[i], i + 10000);
		SORING_TEST_ASSERT(dequeued_objs[i], i + 32 + 10000);
	}
	rte_free(sor);
	return 0;
}
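
/*
 * Illustrative sketch only (name and error codes are assumptions, the
 * autotest does not call it): with two stages, stage 1 can acquire an
 * element only after stage 0 has released it, which is the ordering
 * property the loop above exercises across 10000 stages. Assumes 'sor'
 * was initialized with elem_size == sizeof(uint32_t), at least two
 * stages and enough free space.
 */
static __rte_unused int
example_two_stage_handoff(struct rte_soring *sor, uint32_t *objs, uint32_t n)
{
	uint32_t tmp[MAX_ACQUIRED];
	uint32_t acquired, ftoken;

	if (n > MAX_ACQUIRED)
		return -EINVAL;

	/* producer */
	if (rte_soring_enqueue_bulk(sor, objs, n, NULL) != n)
		return -ENOSPC;

	/* stage 1 sees nothing until stage 0 releases */
	acquired = rte_soring_acquire_bulk(sor, tmp, 1, n, &ftoken, NULL);
	if (acquired != 0)
		return -EIO;

	/* stage 0: acquire, process, release */
	acquired = rte_soring_acquire_bulk(sor, tmp, 0, n, &ftoken, NULL);
	if (acquired != n)
		return -EIO;
	rte_soring_release(sor, NULL, 0, n, ftoken);

	/* now stage 1 can make progress */
	acquired = rte_soring_acquire_bulk(sor, tmp, 1, n, &ftoken, NULL);
	if (acquired != n)
		return -EIO;
	rte_soring_release(sor, NULL, 1, n, ftoken);

	/* consumer: elements passed both stages, so they can be dequeued */
	return (rte_soring_dequeue_bulk(sor, tmp, n, NULL) == n) ? 0 : -EIO;
}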

static int
test_soring_enqueue_dequeue(void)
{
	struct rte_soring *sor = NULL;
	struct rte_soring_param prm;
	int rc;
	uint32_t i;
	size_t sz;
	uint32_t queue_objs[10];
	uint32_t *queue_objs_p[10];
	uint32_t free_space;
	uint32_t enqueued, dequeued;
	uint32_t *dequeued_objs[10];

	memset(&prm, 0, sizeof(prm));
	for (i = 0; i < 10; i++) {
		queue_objs[i] = i + 1;
		queue_objs_p[i] = &queue_objs[i];
	}

	/* init memory */
	set_soring_init_param(&prm, "enqueue/dequeue", sizeof(uint32_t *),
			10, 1, 0, RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	sz = rte_soring_get_memsize(&prm);
	sor = rte_zmalloc(NULL, sz, RTE_CACHE_LINE_SIZE);
	RTE_TEST_ASSERT_NOT_NULL(sor, "alloc failed for soring");
	rc = rte_soring_init(sor, &prm);
	RTE_TEST_ASSERT_SUCCESS(rc, "Failed to init soring");

	free_space = 0;

	enqueued = rte_soring_enqueue_burst(sor, queue_objs_p, 5, &free_space);

	SORING_TEST_ASSERT(free_space, 5);
	SORING_TEST_ASSERT(enqueued, 5);

	/* fixed amount enqueue */
	enqueued = rte_soring_enqueue_bulk(sor, queue_objs_p, 7, &free_space);

	SORING_TEST_ASSERT(free_space, 5);
	SORING_TEST_ASSERT(enqueued, 0);

	/* variable amount enqueue */
	enqueued = rte_soring_enqueue_burst(sor, queue_objs_p + 5, 7,
				&free_space);
	SORING_TEST_ASSERT(free_space, 0);
	SORING_TEST_ASSERT(enqueued, 5);

	/* test no dequeue while stage 0 has not completed */
	dequeued = rte_soring_dequeue_bulk(sor, dequeued_objs, 10, NULL);
	SORING_TEST_ASSERT(dequeued, 0);
	dequeued = rte_soring_dequeue_burst(sor, dequeued_objs, 10, NULL);
	SORING_TEST_ASSERT(dequeued, 0);

	move_forward_stage(sor, 8, 0);

	/* can only dequeue as many as have completed the stage */
	dequeued = rte_soring_dequeue_bulk(sor, dequeued_objs, 10, NULL);
	SORING_TEST_ASSERT(dequeued, 0);
	dequeued = rte_soring_dequeue_burst(sor, dequeued_objs, 10, NULL);
	SORING_TEST_ASSERT(dequeued, 8);
	/* packets remain in order */
	for (i = 0; i < dequeued; i++) {
		RTE_TEST_ASSERT_EQUAL(dequeued_objs[i],
				queue_objs_p[i], "dequeued != enqueued");
	}

	dequeued = rte_soring_dequeue_burst(sor, dequeued_objs, 1, NULL);
	SORING_TEST_ASSERT(dequeued, 0);

	move_forward_stage(sor, 2, 0);
	dequeued = rte_soring_dequeue_burst(sor, dequeued_objs, 2, NULL);
	SORING_TEST_ASSERT(dequeued, 2);
	/* packets remain in order */
	for (i = 0; i < dequeued; i++) {
		RTE_TEST_ASSERT_EQUAL(dequeued_objs[i],
				queue_objs_p[i + 8], "dequeued != enqueued");
	}

	rte_soring_dump(stdout, sor);
	rte_free(sor);
	return 0;
}
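
/*
 * Sketch of the bulk/burst distinction exercised above (the helper name
 * is an assumption, the autotest does not call it): _bulk enqueues either
 * all 'n' elements or none, while _burst enqueues as many as currently
 * fit. Assumes 'sor' uses elem_size == sizeof(uint32_t *).
 */
static __rte_unused uint32_t
example_bulk_vs_burst(struct rte_soring *sor, uint32_t **objs_p, uint32_t n)
{
	uint32_t done;

	/* all-or-nothing: returns either n or 0 */
	done = rte_soring_enqueue_bulk(sor, objs_p, n, NULL);
	if (done == 0) {
		/* take whatever fits, possibly fewer than n */
		done = rte_soring_enqueue_burst(sor, objs_p, n, NULL);
	}
	return done;
}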

static int
test_soring_acquire_release(void)
{

	struct rte_soring *sor = NULL;
	struct rte_soring_param prm;
	int rc, i;
	size_t sz;

	memset(&prm, 0, sizeof(prm));
	uint32_t queue_objs[10];
	uint32_t rc_objs[10];
	uint32_t acquired_objs[10];
	uint32_t dequeued_objs[10];
	uint32_t dequeued_rcs[10];
	uint32_t s1_acquired_rcs[10];
	uint32_t free_space, enqueued, ftoken, acquired, dequeued;

	for (i = 0; i < 10; i++) {
		queue_objs[i] = i + 5;
		rc_objs[i] = i + 10;
	}

	/* init memory */
	set_soring_init_param(&prm, "test_acquire_release", sizeof(uint32_t),
			20, 2, sizeof(uint32_t), RTE_RING_SYNC_MT, RTE_RING_SYNC_MT);
	sz = rte_soring_get_memsize(&prm);
	sor = rte_zmalloc(NULL, sz, RTE_CACHE_LINE_SIZE);
	if (sor == NULL) {
		printf("%s: alloc(%zu) for FIFO with %u elems failed\n",
			__func__, sz, prm.elems);
		return -ENOMEM;
	}

	/* init ring */
	rc = rte_soring_init(sor, &prm);
	RTE_TEST_ASSERT_SUCCESS(rc, "failed to init soring");

	/* enqueue with associated rc */
	enqueued = rte_soring_enqueux_burst(sor, queue_objs, rc_objs, 5,
			&free_space);
	SORING_TEST_ASSERT(enqueued, 5);
	/* enqueue without associated rc */
	enqueued = rte_soring_enqueue_burst(sor, queue_objs + 5, 5,
			&free_space);
	SORING_TEST_ASSERT(enqueued, 5);

	/* acquire the objects with rc's and ensure they are as expected */
	acquired = rte_soring_acquirx_burst(sor, acquired_objs,
			s1_acquired_rcs, 0, 5, &ftoken, NULL);
	SORING_TEST_ASSERT(acquired, 5);
	for (i = 0; i < 5; i++) {
		RTE_TEST_ASSERT_EQUAL(s1_acquired_rcs[i], rc_objs[i],
				"acquired rc[%d]: %u != enqueued rc: %u",
				i, s1_acquired_rcs[i], rc_objs[i]);
		RTE_TEST_ASSERT_EQUAL(acquired_objs[i], queue_objs[i],
				"acquired obj[%d]: %u != enqueued obj %u",
				i, acquired_objs[i], queue_objs[i]);
	}
	rte_soring_release(sor, NULL, 0, 5, ftoken);

	/* acquire the objects without rc's and ensure they are as expected */
	acquired = rte_soring_acquirx_burst(sor, acquired_objs,
			s1_acquired_rcs, 0, 5, &ftoken, NULL);
	SORING_TEST_ASSERT(acquired, 5);
	for (i = 0; i < 5; i++) {
		/* as the rc area of memory is zeroed at init this holds,
		 * but it is an implementation detail rather than
		 * a guarantee.
		 */
		RTE_TEST_ASSERT_EQUAL(s1_acquired_rcs[i], 0,
				"acquired rc not empty");
		RTE_TEST_ASSERT_EQUAL(acquired_objs[i], queue_objs[i + 5],
				"acquired obj[%d]: %u != enqueued obj %u",
				i, acquired_objs[i], queue_objs[i + 5]);
	}
	/* release the objects, adding rc's */
	rte_soring_releasx(sor, NULL, rc_objs + 5, 0, 5,
			ftoken);

	acquired = rte_soring_acquirx_burst(sor, acquired_objs,
			s1_acquired_rcs, 1, 10, &ftoken, NULL);
	SORING_TEST_ASSERT(acquired, 10);
	for (i = 0; i < 10; i++) {
		/* ensure the associated rc's are the ones added at release */
		RTE_TEST_ASSERT_EQUAL(s1_acquired_rcs[i], rc_objs[i],
				"acquired rc[%d]: %u != enqueued rc: %u",
				i, s1_acquired_rcs[i], rc_objs[i]);
		RTE_TEST_ASSERT_EQUAL(acquired_objs[i], queue_objs[i],
				"acquired obj[%d]: %u != enqueued obj %u",
				i, acquired_objs[i], queue_objs[i]);
	}
	/* release the objects without updating objs or rc's */
	rte_soring_release(sor, NULL, 1, 10, ftoken);

	dequeued = rte_soring_dequeux_burst(sor, dequeued_objs, dequeued_rcs,
			10, NULL);
	SORING_TEST_ASSERT(dequeued, 10);
	for (i = 0; i < 10; i++) {
		/* ensure the associated rc's are the ones added at release */
		RTE_TEST_ASSERT_EQUAL(dequeued_rcs[i], rc_objs[i],
				"dequeued rc[%d]: %u != enqueued rc: %u",
				i, dequeued_rcs[i], rc_objs[i]);
		RTE_TEST_ASSERT_EQUAL(dequeued_objs[i], queue_objs[i],
				"dequeued obj[%d]: %u != enqueued obj %u",
				i, dequeued_objs[i], queue_objs[i]);
	}
	rte_soring_dump(stdout, sor);
	rte_free(sor);
	return 0;
}
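
/*
 * Illustrative sketch of a stage that rewrites per-element metadata at
 * release time, as exercised above with rte_soring_releasx(). The helper
 * name and the xor stamp are assumptions; the autotest does not call it.
 * Assumes 'sor' uses elem_size == meta_size == sizeof(uint32_t).
 */
static __rte_unused uint32_t
example_update_metadata(struct rte_soring *sor, uint32_t stage, uint32_t n)
{
	uint32_t objs[MAX_ACQUIRED];
	uint32_t metas[MAX_ACQUIRED];
	uint32_t acquired, ftoken, i;

	if (n > MAX_ACQUIRED)
		n = MAX_ACQUIRED;

	acquired = rte_soring_acquirx_burst(sor, objs, metas, stage, n,
			&ftoken, NULL);
	if (acquired == 0)
		return 0;

	/* stamp new metadata for the elements actually acquired */
	for (i = 0; i < acquired; i++)
		metas[i] = objs[i] ^ UINT32_C(0xdeadbeef);

	/* hand back both the objects and the updated metadata */
	rte_soring_releasx(sor, objs, metas, stage, acquired, ftoken);
	return acquired;
}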

static int
test_soring(void)
{

	/* Negative test cases */
	if (test_soring_init() < 0)
		goto test_fail;

	/* Memory calculations */
	if (test_soring_get_memsize() < 0)
		goto test_fail;

	/* Basic enqueue/dequeue operations */
	if (test_soring_enqueue_dequeue() < 0)
		goto test_fail;

	/* Acquire/release */
	if (test_soring_acquire_release() < 0)
		goto test_fail;

	/* Test large number of stages */
	if (test_soring_stages() < 0)
		goto test_fail;

	return 0;

test_fail:
	return -1;
}

REGISTER_FAST_TEST(soring_autotest, true, true, test_soring);