xref: /dpdk/app/test/test_ring_stress_impl.h (revision 700989f512bbc2ee9758a8a9cb6973cfdeda6f27)
1bf28df24SKonstantin Ananyev /* SPDX-License-Identifier: BSD-3-Clause
2bf28df24SKonstantin Ananyev  * Copyright(c) 2020 Intel Corporation
3bf28df24SKonstantin Ananyev  */
4bf28df24SKonstantin Ananyev 
508966fe7STyler Retzlaff #include <stdalign.h>
608966fe7STyler Retzlaff 
7bf28df24SKonstantin Ananyev #include "test_ring_stress.h"
8bf28df24SKonstantin Ananyev 
9bf28df24SKonstantin Ananyev /**
10bf28df24SKonstantin Ananyev  * Stress test for ring enqueue/dequeue operations.
11cb056611SStephen Hemminger  * Performs the following pattern on each worker:
12bf28df24SKonstantin Ananyev  * dequeue/read-write data from the dequeued objects/enqueue.
13bf28df24SKonstantin Ananyev  * Serves as both functional and performance test of ring
14bf28df24SKonstantin Ananyev  * enqueue/dequeue operations under high contention
15bf28df24SKonstantin Ananyev  * (for both over committed and non-over committed scenarios).
16bf28df24SKonstantin Ananyev  */
17bf28df24SKonstantin Ananyev 
#define RING_NAME	"RING_STRESS"
#define BULK_NUM	32
/* sized so that every lcore can hold up to two bulks worth of objects */
#define RING_SIZE	(2 * BULK_NUM * RTE_MAX_LCORE)

/* commands the main lcore issues to workers through 'wrk_cmd' */
enum {
	WRK_CMD_STOP,
	WRK_CMD_RUN,
};

/* main -> worker command word; cache-line aligned to avoid false sharing */
static alignas(RTE_CACHE_LINE_SIZE) RTE_ATOMIC(uint32_t) wrk_cmd = WRK_CMD_STOP;

/* test run-time in seconds */
static const uint32_t run_time = 60;
/* zero-initialized: per-lcore stat dumps are disabled by default */
static const uint32_t verbose;
32bf28df24SKonstantin Ananyev 
/* per-lcore statistics collected by the worker loop */
struct lcore_stat {
	uint64_t nb_cycle;	/* total cycles spent in the test loop */
	struct {
		uint64_t nb_call;	/* number of deq+enq iterations */
		uint64_t nb_obj;	/* total number of objects moved */
		uint64_t nb_cycle;	/* cycles spent inside ring ops */
		uint64_t max_cycle;	/* slowest iteration (precise mode only) */
		uint64_t min_cycle;	/* fastest iteration (precise mode only) */
	} op;
};
43bf28df24SKonstantin Ananyev 
/* per-worker argument: the shared ring plus this lcore's own stats */
struct __rte_cache_aligned lcore_arg {
	struct rte_ring *rng;
	struct lcore_stat stats;
};
48bf28df24SKonstantin Ananyev 
/* test object: one cache line worth of 32-bit counters */
struct __rte_cache_aligned ring_elem {
	uint32_t cnt[RTE_CACHE_LINE_SIZE / sizeof(uint32_t)];
};
52bf28df24SKonstantin Ananyev 
/*
 * redefinable functions
 * Declared here but defined by the source file that includes this header,
 * so each ring flavour under test supplies its own implementations.
 */
static uint32_t
_st_ring_dequeue_bulk(struct rte_ring *r, void **obj, uint32_t n,
	uint32_t *avail);

static uint32_t
_st_ring_enqueue_bulk(struct rte_ring *r, void * const *obj, uint32_t n,
	uint32_t *free);

static int
_st_ring_init(struct rte_ring *r, const char *name, uint32_t num);
66bf28df24SKonstantin Ananyev 
67bf28df24SKonstantin Ananyev 
68bf28df24SKonstantin Ananyev static void
69bf28df24SKonstantin Ananyev lcore_stat_update(struct lcore_stat *ls, uint64_t call, uint64_t obj,
70bf28df24SKonstantin Ananyev 	uint64_t tm, int32_t prcs)
71bf28df24SKonstantin Ananyev {
72bf28df24SKonstantin Ananyev 	ls->op.nb_call += call;
73bf28df24SKonstantin Ananyev 	ls->op.nb_obj += obj;
74bf28df24SKonstantin Ananyev 	ls->op.nb_cycle += tm;
75bf28df24SKonstantin Ananyev 	if (prcs) {
76bf28df24SKonstantin Ananyev 		ls->op.max_cycle = RTE_MAX(ls->op.max_cycle, tm);
77bf28df24SKonstantin Ananyev 		ls->op.min_cycle = RTE_MIN(ls->op.min_cycle, tm);
78bf28df24SKonstantin Ananyev 	}
79bf28df24SKonstantin Ananyev }
80bf28df24SKonstantin Ananyev 
81bf28df24SKonstantin Ananyev static void
82bf28df24SKonstantin Ananyev lcore_op_stat_aggr(struct lcore_stat *ms, const struct lcore_stat *ls)
83bf28df24SKonstantin Ananyev {
84bf28df24SKonstantin Ananyev 
85bf28df24SKonstantin Ananyev 	ms->op.nb_call += ls->op.nb_call;
86bf28df24SKonstantin Ananyev 	ms->op.nb_obj += ls->op.nb_obj;
87bf28df24SKonstantin Ananyev 	ms->op.nb_cycle += ls->op.nb_cycle;
88bf28df24SKonstantin Ananyev 	ms->op.max_cycle = RTE_MAX(ms->op.max_cycle, ls->op.max_cycle);
89bf28df24SKonstantin Ananyev 	ms->op.min_cycle = RTE_MIN(ms->op.min_cycle, ls->op.min_cycle);
90bf28df24SKonstantin Ananyev }
91bf28df24SKonstantin Ananyev 
92bf28df24SKonstantin Ananyev static void
93bf28df24SKonstantin Ananyev lcore_stat_aggr(struct lcore_stat *ms, const struct lcore_stat *ls)
94bf28df24SKonstantin Ananyev {
95bf28df24SKonstantin Ananyev 	ms->nb_cycle = RTE_MAX(ms->nb_cycle, ls->nb_cycle);
96bf28df24SKonstantin Ananyev 	lcore_op_stat_aggr(ms, ls);
97bf28df24SKonstantin Ananyev }
98bf28df24SKonstantin Ananyev 
/*
 * Print the statistics for one lcore - or the aggregate, when
 * lc == UINT32_MAX - to 'f' in a pseudo C-struct format.
 */
static void
lcore_stat_dump(FILE *f, uint32_t lc, const struct lcore_stat *ls)
{
	long double st;

	/* timer ticks per microsecond, for cycle -> usec conversion */
	st = (long double)rte_get_timer_hz() / US_PER_S;

	if (lc == UINT32_MAX)
		fprintf(f, "%s(AGGREGATE)={\n", __func__);
	else
		fprintf(f, "%s(lcore=%u)={\n", __func__, lc);

	fprintf(f, "\tnb_cycle=%" PRIu64 "(%.2Lf usec),\n",
		ls->nb_cycle, (long double)ls->nb_cycle / st);

	fprintf(f, "\tDEQ+ENQ={\n");

	fprintf(f, "\t\tnb_call=%" PRIu64 ",\n", ls->op.nb_call);
	fprintf(f, "\t\tnb_obj=%" PRIu64 ",\n", ls->op.nb_obj);
	fprintf(f, "\t\tnb_cycle=%" PRIu64 ",\n", ls->op.nb_cycle);
	fprintf(f, "\t\tobj/call(avg): %.2Lf\n",
		(long double)ls->op.nb_obj / ls->op.nb_call);
	fprintf(f, "\t\tcycles/obj(avg): %.2Lf\n",
		(long double)ls->op.nb_cycle / ls->op.nb_obj);
	fprintf(f, "\t\tcycles/call(avg): %.2Lf\n",
		(long double)ls->op.nb_cycle / ls->op.nb_call);

	/* if min/max cycles per call stats was collected */
	if (ls->op.min_cycle != UINT64_MAX) {
		fprintf(f, "\t\tmax cycles/call=%" PRIu64 "(%.2Lf usec),\n",
			ls->op.max_cycle,
			(long double)ls->op.max_cycle / st);
		fprintf(f, "\t\tmin cycles/call=%" PRIu64 "(%.2Lf usec),\n",
			ls->op.min_cycle,
			(long double)ls->op.min_cycle / st);
	}

	fprintf(f, "\t},\n");
	fprintf(f, "};\n");
}
139bf28df24SKonstantin Ananyev 
140bf28df24SKonstantin Ananyev static void
141bf28df24SKonstantin Ananyev fill_ring_elm(struct ring_elem *elm, uint32_t fill)
142bf28df24SKonstantin Ananyev {
143bf28df24SKonstantin Ananyev 	uint32_t i;
144bf28df24SKonstantin Ananyev 
145bf28df24SKonstantin Ananyev 	for (i = 0; i != RTE_DIM(elm->cnt); i++)
146bf28df24SKonstantin Ananyev 		elm->cnt[i] = fill;
147bf28df24SKonstantin Ananyev }
148bf28df24SKonstantin Ananyev 
149bf28df24SKonstantin Ananyev static int32_t
150bf28df24SKonstantin Ananyev check_updt_elem(struct ring_elem *elm[], uint32_t num,
151bf28df24SKonstantin Ananyev 	const struct ring_elem *check, const struct ring_elem *fill)
152bf28df24SKonstantin Ananyev {
153bf28df24SKonstantin Ananyev 	uint32_t i;
154bf28df24SKonstantin Ananyev 
155bf28df24SKonstantin Ananyev 	static rte_spinlock_t dump_lock;
156bf28df24SKonstantin Ananyev 
157bf28df24SKonstantin Ananyev 	for (i = 0; i != num; i++) {
158bf28df24SKonstantin Ananyev 		if (memcmp(check, elm[i], sizeof(*check)) != 0) {
159bf28df24SKonstantin Ananyev 			rte_spinlock_lock(&dump_lock);
160bf28df24SKonstantin Ananyev 			printf("%s(lc=%u, num=%u) failed at %u-th iter, "
161bf28df24SKonstantin Ananyev 				"offending object: %p\n",
162bf28df24SKonstantin Ananyev 				__func__, rte_lcore_id(), num, i, elm[i]);
163bf28df24SKonstantin Ananyev 			rte_memdump(stdout, "expected", check, sizeof(*check));
1643097cc9fSHonnappa Nagarahalli 			rte_memdump(stdout, "result", elm[i], sizeof(*elm[i]));
165bf28df24SKonstantin Ananyev 			rte_spinlock_unlock(&dump_lock);
166bf28df24SKonstantin Ananyev 			return -EINVAL;
167bf28df24SKonstantin Ananyev 		}
168bf28df24SKonstantin Ananyev 		memcpy(elm[i], fill, sizeof(*elm[i]));
169bf28df24SKonstantin Ananyev 	}
170bf28df24SKonstantin Ananyev 
171bf28df24SKonstantin Ananyev 	return 0;
172bf28df24SKonstantin Ananyev }
173bf28df24SKonstantin Ananyev 
/*
 * Compare the expected ('exp') and actual ('res') ring-op return values.
 * On mismatch print a diagnostic naming the worker ('fname') and the
 * operation ('opname') and return -ENOSPC; return 0 otherwise.
 */
static int
check_ring_op(uint32_t exp, uint32_t res, uint32_t lc,
	const char *fname, const char *opname)
{
	if (exp == res)
		return 0;

	printf("%s(lc=%u) failure: %s expected: %u, returned %u\n",
		fname, lc, opname, exp, res);
	return -ENOSPC;
}
185bf28df24SKonstantin Ananyev 
/*
 * Worker main loop: repeatedly dequeue a pseudo-random number of objects,
 * verify and overwrite their contents, then enqueue them back.
 * 'prcs' != 0 enables precise per-iteration cycle accounting (two extra
 * rte_rdtsc_precise() pairs per iteration); with 'prcs' == 0 only the
 * total loop time is recorded.
 * Returns 0 on success, a negative errno-style value on first failure.
 */
static int
test_worker(void *arg, const char *fname, int32_t prcs)
{
	int32_t rc;
	uint32_t lc, n, num;
	uint64_t cl, tm0, tm1;
	struct lcore_arg *la;
	struct ring_elem def_elm, loc_elm;
	struct ring_elem *obj[2 * BULK_NUM];

	la = arg;
	lc = rte_lcore_id();

	/* pattern expected in freshly dequeued objects ... */
	fill_ring_elm(&def_elm, UINT32_MAX);
	/* ... and this lcore's own pattern, written while objects are held */
	fill_ring_elm(&loc_elm, lc);

	/* Acquire ordering is not required as the main is not
	 * really releasing any data through 'wrk_cmd' to
	 * the worker.
	 */
	while (rte_atomic_load_explicit(&wrk_cmd, rte_memory_order_relaxed) != WRK_CMD_RUN)
		rte_pause();

	cl = rte_rdtsc_precise();

	do {
		/* num in interval [7/8, 11/8] of BULK_NUM */
		num = 7 * BULK_NUM / 8 + rte_rand() % (BULK_NUM / 2);

		/* reset all pointer values */
		memset(obj, 0, sizeof(obj));

		/* dequeue num elems */
		tm0 = (prcs != 0) ? rte_rdtsc_precise() : 0;
		n = _st_ring_dequeue_bulk(la->rng, (void **)obj, num, NULL);
		tm0 = (prcs != 0) ? rte_rdtsc_precise() - tm0 : 0;

		/* check return value and objects */
		rc = check_ring_op(num, n, lc, fname,
			RTE_STR(_st_ring_dequeue_bulk));
		if (rc == 0)
			rc = check_updt_elem(obj, num, &def_elm, &loc_elm);
		if (rc != 0)
			break;

		/* enqueue num elems: verify the objects still hold this
		 * lcore's pattern, then restore the default one before
		 * putting them back */
		rte_compiler_barrier();
		rc = check_updt_elem(obj, num, &loc_elm, &def_elm);
		if (rc != 0)
			break;

		tm1 = (prcs != 0) ? rte_rdtsc_precise() : 0;
		n = _st_ring_enqueue_bulk(la->rng, (void **)obj, num, NULL);
		tm1 = (prcs != 0) ? rte_rdtsc_precise() - tm1 : 0;

		/* check return value */
		rc = check_ring_op(num, n, lc, fname,
			RTE_STR(_st_ring_enqueue_bulk));
		if (rc != 0)
			break;

		lcore_stat_update(&la->stats, 1, num, tm0 + tm1, prcs);

	} while (rte_atomic_load_explicit(&wrk_cmd, rte_memory_order_relaxed) == WRK_CMD_RUN);

	cl = rte_rdtsc_precise() - cl;
	/* in 'avg' mode only the aggregate loop time is accumulated */
	if (prcs == 0)
		lcore_stat_update(&la->stats, 0, 0, cl, 0);
	la->stats.nb_cycle = cl;
	return rc;
}
/* worker entry point: precise per-iteration cycle accounting enabled */
static int
test_worker_prcs(void *arg)
{
	return test_worker(arg, __func__, 1);
}
262bf28df24SKonstantin Ananyev 
/* worker entry point: only aggregate (average) cycle accounting */
static int
test_worker_avg(void *arg)
{
	return test_worker(arg, __func__, 0);
}
268bf28df24SKonstantin Ananyev 
/*
 * Release the ring and the element array allocated by mt1_init().
 * Both arguments may be NULL (rte_free() accepts NULL).
 */
static void
mt1_fini(struct rte_ring *rng, void *data)
{
	rte_free(data);
	rte_free(rng);
}
275bf28df24SKonstantin Ananyev 
/*
 * Allocate the test ring and its backing array of 'num' elements, and
 * pre-fill the ring with all of them. On success '*rng' and '*data' hold
 * the allocations; on failure they may be partially set, so the caller is
 * expected to invoke mt1_fini() in either case.
 * Returns 0 on success, a negative errno value otherwise.
 */
static int
mt1_init(struct rte_ring **rng, void **data, uint32_t num)
{
	int32_t rc;
	size_t sz;
	uint32_t i, nr;
	struct rte_ring *r;
	struct ring_elem *elm;
	void *p;

	*rng = NULL;
	*data = NULL;

	sz = num * sizeof(*elm);
	elm = rte_zmalloc(NULL, sz, alignof(typeof(*elm)));
	if (elm == NULL) {
		printf("%s: alloc(%zu) for %u elems data failed",
			__func__, sz, num);
		return -ENOMEM;
	}

	*data = elm;

	/* alloc ring: next power of two of 2 * num, so there is always
	 * free room for the elements being cycled through it */
	nr = rte_align32pow2(2 * num);
	sz = rte_ring_get_memsize(nr);
	r = rte_zmalloc(NULL, sz, alignof(typeof(*r)));
	if (r == NULL) {
		printf("%s: alloc(%zu) for FIFO with %u elems failed",
			__func__, sz, nr);
		return -ENOMEM;
	}

	*rng = r;

	rc = _st_ring_init(r, RING_NAME, nr);
	if (rc != 0) {
		printf("%s: _st_ring_init(%p, %u) failed, error: %d(%s)\n",
			__func__, r, nr, rc, strerror(-rc));
		return rc;
	}

	/* populate the ring with all elements, pre-set to the
	 * default (UINT32_MAX) pattern */
	for (i = 0; i != num; i++) {
		fill_ring_elm(elm + i, UINT32_MAX);
		p = elm + i;
		if (_st_ring_enqueue_bulk(r, &p, 1, NULL) != 1)
			break;
	}

	if (i != num) {
		printf("%s: _st_ring_enqueue(%p, %u) returned %u\n",
			__func__, r, num, i);
		return -ENOSPC;
	}

	return 0;
}
333bf28df24SKonstantin Ananyev 
/*
 * Run one multi-threaded stress scenario: launch 'test' on every worker
 * lcore, let it run for 'run_time' seconds, then stop the workers and
 * dump the aggregated statistics plus the final ring state.
 * Returns 0 on success, non-zero if any worker reported a failure.
 */
static int
test_mt1(int (*test)(void *))
{
	int32_t rc;
	uint32_t lc, mc;
	struct rte_ring *r;
	void *data;
	struct lcore_arg arg[RTE_MAX_LCORE];

	/* min_cycle starts at UINT64_MAX so the first sample always wins */
	static const struct lcore_stat init_stat = {
		.op.min_cycle = UINT64_MAX,
	};

	rc = mt1_init(&r, &data, RING_SIZE);
	if (rc != 0) {
		mt1_fini(r, data);
		return rc;
	}

	memset(arg, 0, sizeof(arg));

	/* launch on all workers */
	RTE_LCORE_FOREACH_WORKER(lc) {
		arg[lc].rng = r;
		arg[lc].stats = init_stat;
		rte_eal_remote_launch(test, &arg[lc], lc);
	}

	/* signal workers to start test */
	rte_atomic_store_explicit(&wrk_cmd, WRK_CMD_RUN, rte_memory_order_release);

	rte_delay_us(run_time * US_PER_S);

	/* signal workers to stop test */
	rte_atomic_store_explicit(&wrk_cmd, WRK_CMD_STOP, rte_memory_order_release);

	/* wait for workers and collect stats. */
	mc = rte_lcore_id();
	arg[mc].stats = init_stat;

	rc = 0;
	RTE_LCORE_FOREACH_WORKER(lc) {
		rc |= rte_eal_wait_lcore(lc);
		lcore_stat_aggr(&arg[mc].stats, &arg[lc].stats);
		if (verbose != 0)
			lcore_stat_dump(stdout, lc, &arg[lc].stats);
	}

	lcore_stat_dump(stdout, UINT32_MAX, &arg[mc].stats);
	rte_ring_dump(stdout, r);
	mt1_fini(r, data);
	return rc;
}
387bf28df24SKonstantin Ananyev 
/* test matrix: the same scenario measured in precise and average modes */
static const struct test_case tests[] = {
	{
		.name = "MT-WRK_ENQ_DEQ-MST_NONE-PRCS",
		.func = test_mt1,
		.wfunc = test_worker_prcs,
	},
	{
		.name = "MT-WRK_ENQ_DEQ-MST_NONE-AVG",
		.func = test_mt1,
		.wfunc = test_worker_avg,
	},
};
400