xref: /dpdk/app/test/test_dmadev_api.c (revision 1fbb3977cb4cc95a88a383825b188398659883ea)
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2021 HiSilicon Limited
 */

#include <string.h>

#include <rte_cycles.h>
#include <rte_malloc.h>
#include <rte_test.h>
#include <rte_dmadev.h>

#include "test.h"
#include "test_dmadev_api.h"

extern int test_dma_api(uint16_t dev_id);

#define TEST_MEMCPY_SIZE	1024
#define TEST_WAIT_US_VAL	50000
#define TEST_SG_MAX		64

static int16_t test_dev_id;
static int16_t invalid_dev_id;

static char *src;
static char *dst;
static char *src_sg[TEST_SG_MAX];
static char *dst_sg[TEST_SG_MAX];

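/* Allocate the source/destination buffers shared by the test cases and lower
 * the dmadev log level.
 */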
static int
testsuite_setup(void)
{
	int i, rc = 0;

	invalid_dev_id = -1;

	for (i = 0; i < TEST_SG_MAX; i++) {
		src_sg[i] = rte_malloc("dmadev_test_src", TEST_MEMCPY_SIZE, 0);
		if (src_sg[i] == NULL) {
			rc = -ENOMEM;
			goto exit;
		}

		dst_sg[i] = rte_malloc("dmadev_test_dst", TEST_MEMCPY_SIZE, 0);
		if (dst_sg[i] == NULL) {
			rte_free(src_sg[i]);
			src_sg[i] = NULL;
			rc = -ENOMEM;
			goto exit;
		}
	}

	src = src_sg[0];
	dst = dst_sg[0];

	/* Set dmadev log level to critical to suppress unnecessary output
	 * during API tests.
	 */
	rte_log_set_level_pattern("lib.dmadev", RTE_LOG_CRIT);

	return rc;
exit:
	while (--i >= 0) {
		rte_free(src_sg[i]);
		rte_free(dst_sg[i]);
	}

	return rc;
}

static void
testsuite_teardown(void)
{
	int i;

	for (i = 0; i < TEST_SG_MAX; i++) {
		rte_free(src_sg[i]);
		src_sg[i] = NULL;
		rte_free(dst_sg[i]);
		dst_sg[i] = NULL;
	}

	src = NULL;
	dst = NULL;
	/* Ensure the dmadev is stopped. */
	rte_dma_stop(test_dev_id);

	rte_log_set_level_pattern("lib.dmadev", RTE_LOG_INFO);
}

static int
test_dma_get_dev_id_by_name(void)
{
	int ret = rte_dma_get_dev_id_by_name("invalid_dmadev_device");
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	return TEST_SUCCESS;
}

static int
test_dma_is_valid_dev(void)
{
	int ret;
	ret = rte_dma_is_valid(invalid_dev_id);
	RTE_TEST_ASSERT(ret == false, "Expected false for invalid dev id");
	ret = rte_dma_is_valid(test_dev_id);
	RTE_TEST_ASSERT(ret == true, "Expected true for valid dev id");
	return TEST_SUCCESS;
}

static int
test_dma_count(void)
{
	uint16_t count = rte_dma_count_avail();
	RTE_TEST_ASSERT(count > 0, "Invalid dmadev count %u", count);
	return TEST_SUCCESS;
}

static int
test_dma_info_get(void)
{
	struct rte_dma_info info = { 0 };
	int ret;

	ret = rte_dma_info_get(invalid_dev_id, &info);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_info_get(test_dev_id, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_info_get(test_dev_id, &info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");

	return TEST_SUCCESS;
}

static int
test_dma_configure(void)
{
	struct rte_dma_conf conf = { 0 };
	struct rte_dma_info info = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_configure(invalid_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_configure(test_dev_id, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for nb_vchans == 0 */
	memset(&conf, 0, sizeof(conf));
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for conf.nb_vchans > info.max_vchans */
	ret = rte_dma_info_get(test_dev_id, &info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
	memset(&conf, 0, sizeof(conf));
	conf.nb_vchans = info.max_vchans + 1;
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check enable silent mode (expected to be rejected) */
	memset(&conf, 0, sizeof(conf));
	conf.nb_vchans = info.max_vchans;
	conf.enable_silent = true;
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check configure success */
	memset(&conf, 0, sizeof(conf));
	conf.nb_vchans = info.max_vchans;
	ret = rte_dma_configure(test_dev_id, &conf);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure dmadev, %d", ret);

	/* Verify the configuration took effect */
	ret = rte_dma_info_get(test_dev_id, &info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
	RTE_TEST_ASSERT_EQUAL(conf.nb_vchans, info.nb_vchans,
			      "Configured nb_vchans does not match");

	return TEST_SUCCESS;
}

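/* Check that vchan setup rejects out-of-range directions and directions the
 * device does not advertise in dev_capa.
 */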
static int
check_direction(void)
{
	struct rte_dma_vchan_conf vchan_conf;
	int ret;

	/* Check for direction */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_DEV_TO_DEV + 1;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM - 1;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for direction and dev_capa combination */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_DEV;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	vchan_conf.direction = RTE_DMA_DIR_DEV_TO_MEM;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	vchan_conf.direction = RTE_DMA_DIR_DEV_TO_DEV;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	return 0;
}

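/* Check that a PCIe port type is rejected for mem-to-mem transfers. */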
static int
check_port_type(struct rte_dma_info *dev_info)
{
	struct rte_dma_vchan_conf vchan_conf;
	int ret;

	/* Check src port type validation */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
	vchan_conf.nb_desc = dev_info->min_desc;
	vchan_conf.src_port.port_type = RTE_DMA_PORT_PCIE;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check dst port type validation */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
	vchan_conf.nb_desc = dev_info->min_desc;
	vchan_conf.dst_port.port_type = RTE_DMA_PORT_PCIE;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	return 0;
}

static int
test_dma_vchan_setup(void)
{
	struct rte_dma_vchan_conf vchan_conf = { 0 };
	struct rte_dma_conf dev_conf = { 0 };
	struct rte_dma_info dev_info = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_vchan_setup(invalid_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_vchan_setup(test_dev_id, 0, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Make sure the device is configured */
	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info");
	dev_conf.nb_vchans = dev_info.max_vchans;
	ret = rte_dma_configure(test_dev_id, &dev_conf);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure dmadev, %d", ret);

	/* Check for invalid vchan */
	ret = rte_dma_vchan_setup(test_dev_id, dev_conf.nb_vchans, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for direction */
	ret = check_direction();
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to check direction");

	/* Check for nb_desc validation */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
	vchan_conf.nb_desc = dev_info.min_desc - 1;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	vchan_conf.nb_desc = dev_info.max_desc + 1;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check port type */
	ret = check_port_type(&dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to check port type");

	/* Check vchan setup success */
	memset(&vchan_conf, 0, sizeof(vchan_conf));
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
	vchan_conf.nb_desc = dev_info.min_desc;
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup vchan, %d", ret);

	return TEST_SUCCESS;
}

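/* Configure the device with the requested number of vchans and set each one
 * up for mem-to-mem copies.
 */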
static int
setup_vchan(int nb_vchans)
{
	struct rte_dma_vchan_conf vchan_conf = { 0 };
	struct rte_dma_info dev_info = { 0 };
	struct rte_dma_conf dev_conf = { 0 };
	int ret;

	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
	dev_conf.nb_vchans = nb_vchans;
	ret = rte_dma_configure(test_dev_id, &dev_conf);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to configure, %d", ret);
	vchan_conf.direction = RTE_DMA_DIR_MEM_TO_MEM;
	vchan_conf.nb_desc = dev_info.min_desc;
	for (int i = 0; i < nb_vchans; i++) {
		ret = rte_dma_vchan_setup(test_dev_id, i, &vchan_conf);
		RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup vchan %d, %d", i, ret);
	}

	return TEST_SUCCESS;
}

static int
test_dma_start_stop(void)
{
	struct rte_dma_vchan_conf vchan_conf = { 0 };
	struct rte_dma_conf dev_conf = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_start(invalid_dev_id);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stop(invalid_dev_id);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	/* Check that reconfigure and vchan setup are rejected while started */
	ret = rte_dma_configure(test_dev_id, &dev_conf);
	RTE_TEST_ASSERT(ret == -EBUSY, "Expected -EBUSY, %d", ret);
	ret = rte_dma_vchan_setup(test_dev_id, 0, &vchan_conf);
	RTE_TEST_ASSERT(ret == -EBUSY, "Expected -EBUSY, %d", ret);

	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}

static int
test_dma_reconfigure(void)
{
	struct rte_dma_info dev_info = { 0 };
	uint16_t cfg_vchans;
	int ret;

	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);

	/* At least two vchans required for the test */
	if (dev_info.max_vchans < 2)
		return TEST_SKIPPED;

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	/* Check reconfigure and vchan setup after device stopped */
	cfg_vchans = dev_info.max_vchans - 1;

	ret = setup_vchan(cfg_vchans);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup %u vchans, %d", cfg_vchans, ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
	RTE_TEST_ASSERT_EQUAL(dev_info.nb_vchans, cfg_vchans, "Incorrect reconfiguration");

	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}

static int
test_dma_stats(void)
{
	struct rte_dma_info dev_info = { 0 };
	struct rte_dma_stats stats = { 0 };
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_stats_get(invalid_dev_id, 0, &stats);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stats_get(invalid_dev_id, 0, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stats_reset(invalid_dev_id, 0);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	/* Check for invalid vchan */
	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);
	ret = rte_dma_stats_get(test_dev_id, dev_info.max_vchans, &stats);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_stats_reset(test_dev_id, dev_info.max_vchans);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	/* Check for valid vchan */
	ret = rte_dma_stats_get(test_dev_id, 0, &stats);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to get stats, %d", ret);
	ret = rte_dma_stats_get(test_dev_id, RTE_DMA_ALL_VCHAN, &stats);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to get all stats, %d", ret);
	ret = rte_dma_stats_reset(test_dev_id, 0);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to reset stats, %d", ret);
	ret = rte_dma_stats_reset(test_dev_id, RTE_DMA_ALL_VCHAN);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to reset all stats, %d", ret);

	return TEST_SUCCESS;
}

static int
test_dma_dump(void)
{
	int ret;

	/* Check for invalid parameters */
	ret = rte_dma_dump(invalid_dev_id, stderr);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);
	ret = rte_dma_dump(test_dev_id, NULL);
	RTE_TEST_ASSERT(ret == -EINVAL, "Expected -EINVAL, %d", ret);

	return TEST_SUCCESS;
}

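/* Fill the source buffer with a known pattern and zero the destination. */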
static void
setup_memory(void)
{
	int i;

	for (i = 0; i < TEST_MEMCPY_SIZE; i++)
		src[i] = (char)i;
	memset(dst, 0, TEST_MEMCPY_SIZE);
}

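/* Verify that the destination buffer matches the source pattern. */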
static int
verify_memory(void)
{
	int i;

	for (i = 0; i < TEST_MEMCPY_SIZE; i++) {
		if (src[i] == dst[i])
			continue;
		RTE_TEST_ASSERT_EQUAL(src[i], dst[i],
			"Failed to copy memory, %d %d", src[i], dst[i]);
	}

	return 0;
}

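/* Initialize the first n scatter-gather source buffers and zero the
 * destinations.
 */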
static void
sg_memory_setup(int n)
{
	int i, j;

	for (i = 0; i < n; i++) {
		for (j = 0; j < TEST_MEMCPY_SIZE; j++)
			src_sg[i][j] = (char)j;

		memset(dst_sg[i], 0, TEST_MEMCPY_SIZE);
	}
}

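/* Verify that each of the first n scatter-gather destinations matches its
 * source buffer.
 */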
static int
sg_memory_verify(int n)
{
	int i, j;

	for (i = 0; i < n; i++) {
		for (j = 0; j < TEST_MEMCPY_SIZE; j++) {
			if (src_sg[i][j] == dst_sg[i][j])
				continue;

			RTE_TEST_ASSERT_EQUAL(src_sg[i][j], dst_sg[i][j], "Failed to copy memory, %d %d",
				src_sg[i][j], dst_sg[i][j]);
		}
	}

	return 0;
}

static int
test_dma_completed(void)
{
	uint16_t last_idx = 1;
	bool has_error = true;
	uint16_t cpl_ret;
	int ret;

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	setup_memory();

	/* Check enqueue without submit */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, 0);
	RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to get completed");

	/* Check explicit submit */
	ret = rte_dma_submit(test_dev_id, 0);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to submit, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u",
				last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");
	ret = verify_memory();
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	setup_memory();

	/* Check for enqueue with submit */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u",
				last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");
	ret = verify_memory();
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	/* Stop dmadev to return it to a known state */
	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}

static int
test_dma_completed_status(void)
{
	enum rte_dma_status_code status[1] = { 1 };
	uint16_t last_idx = 1;
	uint16_t cpl_ret, i;
	int ret;

	/* Setup one vchan for later test */
	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	/* Check for enqueue with submit */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
					   status);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed status");
	RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u",
				last_idx);
	for (i = 0; i < RTE_DIM(status); i++)
		RTE_TEST_ASSERT_EQUAL(status[i], 0,
				"Unexpected status, %d", status[i]);

	/* Check completed status again (no new completions expected) */
	cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
					   status);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to get completed status");

	/* Check for enqueue with submit again */
	ret = rte_dma_copy(test_dev_id, 0, (rte_iova_t)src, (rte_iova_t)dst,
			   TEST_MEMCPY_SIZE, RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);
	rte_delay_us_sleep(TEST_WAIT_US_VAL);
	cpl_ret = rte_dma_completed_status(test_dev_id, 0, 1, &last_idx,
					   status);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed status");
	RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u",
				last_idx);
	for (i = 0; i < RTE_DIM(status); i++)
		RTE_TEST_ASSERT_EQUAL(status[i], 0,
				"Unexpected status, %d", status[i]);

	/* Stop dmadev to return it to a known state */
	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}

static int
test_dma_sg(void)
{
	struct rte_dma_sge src_sge[TEST_SG_MAX], dst_sge[TEST_SG_MAX];
	struct rte_dma_info dev_info = { 0 };
	uint16_t last_idx = -1;
	bool has_error = true;
	int n_sge, i, ret;
	uint16_t cpl_ret;

	ret = rte_dma_info_get(test_dev_id, &dev_info);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to obtain device info, %d", ret);

	if ((dev_info.dev_capa & RTE_DMA_CAPA_OPS_COPY_SG) == 0)
		return TEST_SKIPPED;

	n_sge = RTE_MIN(dev_info.max_sges, TEST_SG_MAX);

	ret = setup_vchan(1);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to setup one vchan, %d", ret);

	ret = rte_dma_start(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to start, %d", ret);

	for (i = 0; i < n_sge; i++) {
		src_sge[i].addr = rte_malloc_virt2iova(src_sg[i]);
		src_sge[i].length = TEST_MEMCPY_SIZE;
		dst_sge[i].addr = rte_malloc_virt2iova(dst_sg[i]);
		dst_sge[i].length = TEST_MEMCPY_SIZE;
	}

	sg_memory_setup(n_sge);

	/* Check enqueue without submit */
	ret = rte_dma_copy_sg(test_dev_id, 0, src_sge, dst_sge, n_sge, n_sge, 0);
	RTE_TEST_ASSERT_EQUAL(ret, 0, "Failed to enqueue copy, %d", ret);

	rte_delay_us_sleep(TEST_WAIT_US_VAL);

	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 0, "Failed to get completed");

	/* Check explicit submit */
	ret = rte_dma_submit(test_dev_id, 0);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to submit, %d", ret);

	rte_delay_us_sleep(TEST_WAIT_US_VAL);

	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 0, "Last idx should be zero, %u", last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");

	ret = sg_memory_verify(n_sge);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	sg_memory_setup(n_sge);

	/* Check for enqueue with submit */
	ret = rte_dma_copy_sg(test_dev_id, 0, src_sge, dst_sge, n_sge, n_sge,
			      RTE_DMA_OP_FLAG_SUBMIT);
	RTE_TEST_ASSERT_EQUAL(ret, 1, "Failed to enqueue copy, %d", ret);

	rte_delay_us_sleep(TEST_WAIT_US_VAL);

	cpl_ret = rte_dma_completed(test_dev_id, 0, 1, &last_idx, &has_error);
	RTE_TEST_ASSERT_EQUAL(cpl_ret, 1, "Failed to get completed");
	RTE_TEST_ASSERT_EQUAL(last_idx, 1, "Last idx should be 1, %u", last_idx);
	RTE_TEST_ASSERT_EQUAL(has_error, false, "Should have no error");

	ret = sg_memory_verify(n_sge);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to verify memory");

	/* Stop dmadev to return it to a known state */
	ret = rte_dma_stop(test_dev_id);
	RTE_TEST_ASSERT_SUCCESS(ret, "Failed to stop, %d", ret);

	return TEST_SUCCESS;
}

static struct unit_test_suite dma_api_testsuite = {
	.suite_name = "DMA API Test Suite",
	.setup = testsuite_setup,
	.teardown = testsuite_teardown,
	.unit_test_cases = {
		TEST_CASE(test_dma_get_dev_id_by_name),
		TEST_CASE(test_dma_is_valid_dev),
		TEST_CASE(test_dma_count),
		TEST_CASE(test_dma_info_get),
		TEST_CASE(test_dma_configure),
		TEST_CASE(test_dma_vchan_setup),
		TEST_CASE(test_dma_start_stop),
		TEST_CASE(test_dma_reconfigure),
		TEST_CASE(test_dma_stats),
		TEST_CASE(test_dma_dump),
		TEST_CASE(test_dma_completed),
		TEST_CASE(test_dma_completed_status),
		TEST_CASE(test_dma_sg),
		TEST_CASES_END()
	}
};

int
test_dma_api(uint16_t dev_id)
{
	struct rte_dma_info dev_info;

	if (rte_dma_info_get(dev_id, &dev_info) < 0)
		return TEST_SKIPPED;

	printf("\n### Test dmadev infrastructure using %u [%s]\n", dev_id, dev_info.dev_name);
	test_dev_id = dev_id;
	return unit_test_suite_runner(&dma_api_testsuite);
}