xref: /dpdk/app/test/test_dmadev_api.c (revision 0623f2758baa4bf1c347439ecf1ca03543b7b381)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2021 HiSilicon Limited
3  */
4 
5 #include <string.h>
6 
7 #include <rte_cycles.h>
8 #include <rte_malloc.h>
9 #include <rte_test.h>
10 #include <rte_dmadev.h>
11 
12 #include "test.h"
13 #include "test_dmadev_api.h"
14 
15 extern int test_dma_api(uint16_t dev_id);
16 
17 #define TEST_MEMCPY_SIZE	1024
18 #define TEST_WAIT_US_VAL	50000
19 #define TEST_SG_MAX		64
20 
21 static int16_t test_dev_id;
22 static int16_t invalid_dev_id;
23 
24 static char *src;
25 static char *dst;
26 static char *src_sg[TEST_SG_MAX];
27 static char *dst_sg[TEST_SG_MAX];
28 
29 static int
testsuite_setup(void)30 testsuite_setup(void)
31 {
32 	invalid_dev_id = -1;
33 	int i, rc = 0;
34 
35 	for (i = 0; i < TEST_SG_MAX; i++) {
36 		src_sg[i] = rte_malloc("dmadev_test_src", TEST_MEMCPY_SIZE, 0);
37 		if (src_sg[i] == NULL) {
38 			rc = -ENOMEM;
39 			goto exit;
40 		}
41 
42 		dst_sg[i] = rte_malloc("dmadev_test_dst", TEST_MEMCPY_SIZE, 0);
43 		if (dst_sg[i] == NULL) {
44 			rte_free(src_sg[i]);
45 			src_sg[i] = NULL;
46 			rc = -ENOMEM;
47 			goto exit;
48 		}
49 	}
50 
51 	src = src_sg[0];
52 	dst = dst_sg[0];
53 
54 	/* Set dmadev log level to critical to suppress unnecessary output
55 	 * during API tests.
56 	 */
57 	rte_log_set_level_pattern("lib.dmadev", RTE_LOG_CRIT);
58 
59 	return rc;
60 exit:
61 	while (--i >= 0) {
62 		rte_free(src_sg[i]);
63 		rte_free(dst_sg[i]);
64 	}
65 
66 	return rc;
67 }
68 
69 static void
testsuite_teardown(void)70 testsuite_teardown(void)
71 {
72 	int i;
73 
74 	for (i = 0; i < TEST_SG_MAX; i++) {
75 		rte_free(src_sg[i]);
76 		src_sg[i] = NULL;
77 		rte_free(dst_sg[i]);
78 		dst_sg[i] = NULL;
79 	}
80 
81 	src = NULL;
82 	dst = NULL;
83 	/* Ensure the dmadev is stopped. */
84 	rte_dma_stop(test_dev_id);
85 	rte_dma_stats_reset(test_dev_id, RTE_DMA_ALL_VCHAN);
86 
87 	rte_log_set_level_pattern("lib.dmadev", RTE_LOG_INFO);
88 }
89 
90 static int
test_dma_get_dev_id_by_name(void)91 test_dma_get_dev_id_by_name(void)
92 {
93 	int ret = rte_dma_get_dev_id_by_name("invalid_dmadev_device");
94 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
95 	return TEST_SUCCESS;
96 }
97 
98 static int
test_dma_is_valid_dev(void)99 test_dma_is_valid_dev(void)
100 {
101 	int ret;
102 	ret = rte_dma_is_valid(invalid_dev_id);
103 	RTE_TEST_ASSERT(ret == false, "Expected false for invalid dev id");
104 	ret = rte_dma_is_valid(test_dev_id);
105 	RTE_TEST_ASSERT(ret == true, "Expected true for valid dev id");
106 	return TEST_SUCCESS;
107 }
108 
109 static int
test_dma_count(void)110 test_dma_count(void)
111 {
112 	uint16_t count = rte_dma_count_avail();
113 	RTE_TEST_ASSERT(count > 0, "Invalid dmadev count %u", count);
114 	return TEST_SUCCESS;
115 }
116 
117 static int
test_dma_info_get(void)118 test_dma_info_get(void)
119 {
120 	struct rte_dma_info info =  { 0 };
121 	int ret;
122 
123 	ret = rte_dma_info_get(invalid_dev_id, &info);
124 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
125 	ret = rte_dma_info_get(test_dev_id, NULL);
126 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
127 	ret = rte_dma_info_get(test_dev_id, &info);
128 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
129 
130 	return TEST_SUCCESS;
131 }
132 
/*
 * Exercise rte_dma_configure() parameter validation, then perform a
 * real configuration and verify it took effect via rte_dma_info_get().
 *
 * Note the call order matters: the device must end up configured with
 * info.max_vchans vchans because later cases build on that state.
 */
static int
test_dma_configure(void)
{
	struct rte_dma_conf conf = { 0 };
	struct rte_dma_info info = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_configure(invalid_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_configure(test_dev_id, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for nb_vchans == 0 */
	memset(&conf, 0, sizeof(conf));
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for conf.nb_vchans > info.max_vchans */
	ret = rte_dma_info_get(test_dev_id, &info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
	memset(&conf, 0, sizeof(conf));
	conf.nb_vchans = info.max_vchans + 1;
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check enable silent mode
	 * NOTE(review): assumes the device does not advertise silent-mode
	 * capability, so this must fail — confirm for new test devices.
	 */
	memset(&conf, 0, sizeof(conf));
	conf.nb_vchans = info.max_vchans;
	conf.enable_silent = true;
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Configure success */
	memset(&conf, 0, sizeof(conf));
	conf.nb_vchans = info.max_vchans;
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure dmadev, %d", ret);

	/* Check configure success */
	ret = rte_dma_info_get(test_dev_id, &info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
	RTE_TEST_ASSERT_EQUAL(conf.nb_vchans, info.nb_vchans,
			      "Configure nb_vchans not match");

	return TEST_SUCCESS;
}
180 
181 static int
check_direction(void)182 check_direction(void)
183 {
184 	struct rte_dma_vchan_conf vchan_conf;
185 	int ret;
186 
187 	/* Check for direction */
188 	memset(&vchan_conf, 0, sizeof(vchan_conf));
189 	vchan_conf.direction = RTE_DMA_DIR_DEV_TO_DEV + 1;
190 	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
191 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
192 	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM - 1;
193 	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
194 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
195 
196 	/* Check for direction and dev_capa combination */
197 	memset(&vchan_conf, 0, sizeof(vchan_conf));
198 	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_DEV;
199 	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
200 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
201 	vchan_conf.direction = RTE_DMA_DIR_DEV_TO_MEM;
202 	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
203 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
204 	vchan_conf.direction = RTE_DMA_DIR_DEV_TO_DEV;
205 	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
206 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
207 
208 	return 0;
209 }
210 
211 static int
check_port_type(struct rte_dma_info * dev_info)212 check_port_type(struct rte_dma_info *dev_info)
213 {
214 	struct rte_dma_vchan_conf vchan_conf;
215 	int ret;
216 
217 	/* Check src port type validation */
218 	memset(&vchan_conf, 0, sizeof(vchan_conf));
219 	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
220 	vchan_conf.nb_desc = dev_info->min_desc;
221 	vchan_conf.src_port.port_type = RTE_DMA_PORT_PCIE;
222 	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
223 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
224 
225 	/* Check dst port type validation */
226 	memset(&vchan_conf, 0, sizeof(vchan_conf));
227 	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
228 	vchan_conf.nb_desc = dev_info->min_desc;
229 	vchan_conf.dst_port.port_type = RTE_DMA_PORT_PCIE;
230 	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
231 	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
232 
233 	return 0;
234 }
235 
/*
 * Exercise rte_dma_vchan_setup() validation: invalid device/conf,
 * out-of-range vchan index, bad directions (via check_direction()),
 * nb_desc outside [min_desc, max_desc], bad port types (via
 * check_port_type()), and finally a successful mem-to-mem setup.
 */
static int
test_dma_vchan_setup(void)
{
	struct rte_dma_vchan_conf vchan_conf = { 0 };
	struct rte_dma_conf dev_conf = { 0 };
	struct rte_dma_info dev_info = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_vchan_setup(invalid_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_vchan_setup(test_dev_id, 0, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Make sure configure success */
	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
	dev_conf.nb_vchans = dev_info.max_vchans;
	ret = rte_dma_configure(test_dev_id, &dev_conf);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure dmadev, %d", ret);

	/* Check for invalid vchan (first index past the configured range) */
	ret = rte_dma_vchan_setup(test_dev_id, dev_conf.nb_vchans, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for direction */
	ret = check_direction();
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to check direction");

	/* Check for nb_desc validation (one below min and one above max) */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
	vchan_conf.nb_desc = dev_info.min_desc - 1;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	vchan_conf.nb_desc = dev_info.max_desc + 1;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check port type */
	ret = check_port_type(&dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to check port type");

	/* Check vchan setup success */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
	vchan_conf.nb_desc = dev_info.min_desc;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup vchan, %d", ret);

	return TEST_SUCCESS;
}
290 
291 static int
setup_vchan(int nb_vchans)292 setup_vchan(int nb_vchans)
293 {
294 	struct rte_dma_vchan_conf vchan_conf = { 0 };
295 	struct rte_dma_info dev_info = { 0 };
296 	struct rte_dma_conf dev_conf = { 0 };
297 	int ret;
298 
299 	ret = rte_dma_info_get(test_dev_id, &dev_info);
300 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
301 	dev_conf.nb_vchans = nb_vchans;
302 	ret = rte_dma_configure(test_dev_id, &dev_conf);
303 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure, %d", ret);
304 	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
305 	vchan_conf.nb_desc = dev_info.min_desc;
306 	for (int i = 0; i < nb_vchans; i++) {
307 		ret = rte_dma_vchan_setup(test_dev_id, i, &vchan_conf);
308 		RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup vchan %d, %d", i, ret);
309 	}
310 
311 	return TEST_SUCCESS;
312 }
313 
/*
 * Exercise rte_dma_start()/rte_dma_stop(): invalid ids fail with
 * -EINVAL, and while the device is started both reconfiguration and
 * vchan setup must be refused with -EBUSY.
 */
static int
test_dma_start_stop(void)
{
	struct rte_dma_vchan_conf vchan_conf = { 0 };
	struct rte_dma_conf dev_conf = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_start(invalid_dev_id);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stop(invalid_dev_id);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	/* Check reconfigure and vchan setup when device started */
	ret = rte_dma_configure(test_dev_id, &dev_conf);
	RTE_TEST_ASSERT(ret == -EBUSY, "Failed to configure, %d", ret);
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EBUSY, "Failed to setup vchan, %d", ret);

	/* Leave the device stopped for the next case. */
	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}
345 
346 static int
test_dma_reconfigure(void)347 test_dma_reconfigure(void)
348 {
349 	struct rte_dma_conf dev_conf = { 0 };
350 	struct rte_dma_info dev_info = { 0 };
351 	uint16_t cfg_vchans;
352 	int ret;
353 
354 	ret = rte_dma_info_get(test_dev_id, &dev_info);
355 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
356 
357 	/* At least two vchans required for the test */
358 	if (dev_info.max_vchans < 2)
359 		return TEST_SKIPPED;
360 
361 	/* Setup one vchan for later test */
362 	ret = setup_vchan(1);
363 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);
364 
365 	ret = rte_dma_start(test_dev_id);
366 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);
367 
368 	ret = rte_dma_stop(test_dev_id);
369 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);
370 
371 	/* Check reconfigure and vchan setup after device stopped */
372 	cfg_vchans = dev_conf.nb_vchans = (dev_info.max_vchans - 1);
373 
374 	ret = setup_vchan(cfg_vchans);
375 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);
376 
377 	ret = rte_dma_start(test_dev_id);
378 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);
379 
380 	ret = rte_dma_info_get(test_dev_id, &dev_info);
381 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
382 	RTE_TEST_ASSERT_EQUAL(dev_info.nb_vchans, cfg_vchans, "incorrect reconfiguration");
383 
384 	ret = rte_dma_stop(test_dev_id);
385 	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);
386 
387 	return TEST_SUCCESS;
388 }
389 
/*
 * Exercise rte_dma_stats_get()/rte_dma_stats_reset(): invalid device
 * ids and out-of-range vchan indexes fail with -EINVAL, while vchan 0
 * and the RTE_DMA_ALL_VCHAN wildcard succeed on a configured device.
 */
static int
test_dma_stats(void)
{
	struct rte_dma_info dev_info = { 0 };
	struct rte_dma_stats stats = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_stats_get(invalid_dev_id, 0, &stats);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stats_get(invalid_dev_id, 0, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stats_reset(invalid_dev_id, 0);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	/* Check for invalid vchan (max_vchans is one past the valid range) */
	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
	ret = rte_dma_stats_get(test_dev_id, dev_info.max_vchans, &stats);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stats_reset(test_dev_id, dev_info.max_vchans);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for valid vchan */
	ret = rte_dma_stats_get(test_dev_id, 0, &stats);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to get stats, %d", ret);
	ret = rte_dma_stats_get(test_dev_id, RTE_DMA_ALL_VCHAN, &stats);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to get all stats, %d", ret);
	ret = rte_dma_stats_reset(test_dev_id, 0);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to reset stats, %d", ret);
	ret = rte_dma_stats_reset(test_dev_id, RTE_DMA_ALL_VCHAN);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to reset all stats, %d", ret);

	return TEST_SUCCESS;
}
429 
430 static int
test_dma_dump(void)431 test_dma_dump(void)
432 {
433 	int ret;
434 
435 	/* Check for invalid parameters */
436 	ret = rte_dma_dump(invalid_dev_id, stderr);
437 	RTE_TEST_ASSERT(ret == -EINVAL, "Excepted -EINVAL, %d", ret);
438 	ret = rte_dma_dump(test_dev_id, NULL);
439 	RTE_TEST_ASSERT(ret == -EINVAL, "Excepted -EINVAL, %d", ret);
440 
441 	return TEST_SUCCESS;
442 }
443 
444 static void
setup_memory(void)445 setup_memory(void)
446 {
447 	int i;
448 
449 	for (i = 0; i < TEST_MEMCPY_SIZE; i++)
450 		src[i] = (char)i;
451 	memset(dst, 0, TEST_MEMCPY_SIZE);
452 }
453 
454 static int
verify_memory(void)455 verify_memory(void)
456 {
457 	int i;
458 
459 	for (i = 0; i < TEST_MEMCPY_SIZE; i++) {
460 		if (src[i] == dst[i])
461 			continue;
462 		RTE_TEST_ASSERT_EQUAL(src[i], dst[i],
463 			"Failed to copy memory, %d %d", src[i], dst[i]);
464 	}
465 
466 	return 0;
467 }
468 
469 static void
sg_memory_setup(int n)470 sg_memory_setup(int n)
471 {
472 	int i, j;
473 
474 	for (i = 0; i < n; i++) {
475 		for (j = 0; j < TEST_MEMCPY_SIZE; j++)
476 			src_sg[i][j] = (char)j;
477 
478 		memset(dst_sg[i], 0, TEST_MEMCPY_SIZE);
479 	}
480 }
481 
482 static int
sg_memory_verify(int n)483 sg_memory_verify(int n)
484 {
485 	int i, j;
486 
487 	for (i = 0; i < n; i++) {
488 		for (j = 0; j < TEST_MEMCPY_SIZE; j++) {
489 			if (src_sg[i][j] == dst_sg[i][j])
490 				continue;
491 
492 			RTE_TEST_ASSERT_EQUAL(src_sg[i][j], dst_sg[i][j], "Failed to copy memory, %d %d",
493 				src_sg[i][j], dst_sg[i][j]);
494 		}
495 	}
496 
497 	return 0;
498 }
499 
/*
 * Exercise the copy + completion path on vchan 0:
 *  - an enqueue without RTE_DMA_OP_FLAG_SUBMIT must not complete until
 *    rte_dma_submit() is called;
 *  - an enqueue with the submit flag completes on its own;
 *  - ring indexes returned by rte_dma_copy() / reported in last_idx
 *    advance 0, 1, ... across the two operations.
 * The fixed sleeps give the (possibly asynchronous) device time to
 * finish before completions are polled.
 */
static int
test_dma_completed(void)
{
	uint16_t last_idx = 1;
	bool has_error = true;
	uint16_t cpl_ret;
	int ret;

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	setup_memory();

	/* Check enqueue without submit */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, 0);
	RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	/* Nothing was submitted, so nothing may have completed. */
	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to get completed");

	/* Check add submit */
	ret = rte_dma_submit(test_dev_id, 0);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to submit, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u",
				last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");
	ret = verify_memory();
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	setup_memory();

	/* Check for enqueue with submit */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u",
				last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");
	ret = verify_memory();
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	/* Stop dmadev to make sure dmadev to a known state */
	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}
558 
/*
 * Exercise rte_dma_completed_status() on vchan 0: two submitted copies
 * must each report one completion with a success status code (0), the
 * reported last_idx advancing 0 then 1; polling again with nothing in
 * flight must report zero completions.
 */
static int
test_dma_completed_status(void)
{
	/* Seeded non-zero so the API must overwrite it on success. */
	enum rte_dma_status_code status[1] = { 1 };
	uint16_t last_idx = 1;
	uint16_t cpl_ret, i;
	int ret;

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	/* Check for enqueue with submit */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
					   status);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to completed status");
	RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u",
				last_idx);
	for (i = 0; i < RTE_DIM(status); i++)
		RTE_TEST_ASSERT_EQUAL(status[i], 0,
				"Failed to completed status, %d", status[i]);

	/* Check do completed status again (queue now drained) */
	cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
					   status);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to completed status");

	/* Check for enqueue with submit again */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
					   status);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to completed status");
	RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u",
				last_idx);
	for (i = 0; i < RTE_DIM(status); i++)
		RTE_TEST_ASSERT_EQUAL(status[i], 0,
				"Failed to completed status, %d", status[i]);

	/* Stop dmadev to make sure dmadev to a known state */
	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}
613 
/*
 * Exercise the scatter-gather copy path (rte_dma_copy_sg()), skipped
 * when the device lacks RTE_DMA_CAPA_OPS_COPY_SG. Mirrors
 * test_dma_completed(): one enqueue without submit (must not complete),
 * then submit-and-poll, then an enqueue with the submit flag — each SG
 * operation counts as a single ring entry (last_idx 0 then 1).
 */
static int
test_dma_sg(void)
{
	struct rte_dma_sge src_sge[TEST_SG_MAX], dst_sge[TEST_SG_MAX];
	struct rte_dma_info dev_info = { 0 };
	uint16_t last_idx = -1;
	bool has_error = true;
	int n_sge, i, ret;
	uint16_t cpl_ret;

	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);

	if ((dev_info.dev_capa & RTE_DMA_CAPA_OPS_COPY_SG) == 0)
		return TEST_SKIPPED;

	/* Clamp the SG list length to what the device supports. */
	n_sge = RTE_MIN(dev_info.max_sges, TEST_SG_MAX);

	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	/* Build the SG descriptor lists from the preallocated buffers. */
	for (i = 0; i < n_sge; i++) {
		src_sge[i].addr = rte_malloc_virt2iova(src_sg[i]);
		src_sge[i].length = TEST_MEMCPY_SIZE;
		dst_sge[i].addr = rte_malloc_virt2iova(dst_sg[i]);
		dst_sge[i].length = TEST_MEMCPY_SIZE;
	}

	sg_memory_setup(n_sge);

	/* Check enqueue without submit */
	ret = rte_dma_copy_sg(test_dev_id, 0, src_sge, dst_sge, n_sge, n_sge, 0);
	RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);

	rte_delay_us_sleep(TEST_WAIT_US_VAL);

	/* Nothing was submitted, so nothing may have completed. */
	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to get completed");

	/* Check DMA submit */
	ret = rte_dma_submit(test_dev_id, 0);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to submit, %d", ret);

	rte_delay_us_sleep(TEST_WAIT_US_VAL);

	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u", last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");

	ret = sg_memory_verify(n_sge);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	sg_memory_setup(n_sge);

	/* Check for enqueue with submit */
	ret = rte_dma_copy_sg(test_dev_id, 0, src_sge, dst_sge, n_sge, n_sge,
			      RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);

	rte_delay_us_sleep(TEST_WAIT_US_VAL);

	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u", last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");

	ret = sg_memory_verify(n_sge);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	/* Stop dmadev to make sure dmadev to a known state */
	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}
693 
/* Suite definition: setup/teardown manage the shared buffers and log
 * level; cases run in the listed order against test_dev_id.
 */
static struct unit_test_suite dma_api_testsuite = {
	.suite_name = "DMA API Test Suite",
	.setup = testsuite_setup,
	.teardown = testsuite_teardown,
	.unit_test_cases = {
		TEST_CASE(test_dma_get_dev_id_by_name),
		TEST_CASE(test_dma_is_valid_dev),
		TEST_CASE(test_dma_count),
		TEST_CASE(test_dma_info_get),
		TEST_CASE(test_dma_configure),
		TEST_CASE(test_dma_vchan_setup),
		TEST_CASE(test_dma_start_stop),
		TEST_CASE(test_dma_reconfigure),
		TEST_CASE(test_dma_stats),
		TEST_CASE(test_dma_dump),
		TEST_CASE(test_dma_completed),
		TEST_CASE(test_dma_completed_status),
		TEST_CASE(test_dma_sg),
		TEST_CASES_END()
	}
};
715 
716 int
test_dma_api(uint16_t dev_id)717 test_dma_api(uint16_t dev_id)
718 {
719 	struct rte_dma_info dev_info;
720 
721 	if (rte_dma_info_get(dev_id, &dev_info) < 0)
722 		return TEST_SKIPPED;
723 
724 	printf("\n### Test dmadev infrastructure using %u [%s]\n", dev_id, dev_info.dev_name);
725 	test_dev_id = dev_id;
726 	return unit_test_suite_runner(&dma_api_testsuite);
727 };
728