/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright (c) 2022 Marvell.
 */

#include <rte_common.h>
#include <rte_launch.h>

#include "ml_common.h"
#include "test_inference_common.h"
#include "test_stats.h"

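/*
 * Interleave test driver: load and start all models from the filelist,
 * launch inference requests on the worker lcores across all models,
 * collect per-model and device extended stats, then stop and unload
 * the models.
 */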
static int
test_inference_interleave_driver(struct ml_test *test, struct ml_options *opt)
{
	struct test_inference *t;
	uint16_t fid = 0;
	int ret = 0;

	t = ml_test_priv(test);

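	/* set up the ML device and the memory required for the test */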
	ret = ml_inference_mldev_setup(test, opt);
	if (ret != 0)
		return ret;

	ret = ml_inference_mem_setup(test, opt);
	if (ret != 0)
		return ret;

	/* load and start all models */
	for (fid = 0; fid < opt->nb_filelist; fid++) {
		ret = ml_model_load(test, opt, &t->model[fid], fid);
		if (ret != 0)
			goto error;

		ret = ml_model_start(test, opt, &t->model[fid], fid);
		if (ret != 0)
			goto error;

		ret = ml_inference_iomem_setup(test, opt, fid);
		if (ret != 0)
			goto error;
	}

	/* launch inference requests */
	ret = ml_inference_launch_cores(test, opt, 0, opt->nb_filelist - 1);
	if (ret != 0) {
		ml_err("failed to launch cores");
		goto error;
	}

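	/* wait for all launched worker lcores to finish */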
	rte_eal_mp_wait_lcore();

	/* check results and release model I/O memory */
	for (fid = 0; fid < opt->nb_filelist; fid++) {
		ret = ml_inference_result(test, opt, fid);
		if (ret != ML_TEST_SUCCESS)
			goto error;

		ml_inference_iomem_destroy(test, opt, fid);
	}

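	/* report per-model extended stats */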
	for (fid = 0; fid < opt->nb_filelist; fid++)
		ml_stats_get(test, opt, RTE_ML_DEV_XSTATS_MODEL, fid);

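	/* stop and unload all models */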
	for (fid = 0; fid < opt->nb_filelist; fid++) {
		ret = ml_model_stop(test, opt, &t->model[fid], fid);
		if (ret != 0)
			goto error;

		ret = ml_model_unload(test, opt, &t->model[fid], fid);
		if (ret != 0)
			goto error;
	}

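	/* report device-level stats and throughput, then release test memory */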
	ml_stats_get(test, opt, RTE_ML_DEV_XSTATS_DEVICE, -1);
	ml_throughput_get(test, opt);
	ml_inference_mem_destroy(test, opt);

	ret = ml_inference_mldev_destroy(test, opt);
	if (ret != 0)
		return ret;

	t->cmn.result = ML_TEST_SUCCESS;

	return 0;

error:
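	/* on failure, release I/O memory and stop/unload any models that were set up */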
	ml_inference_mem_destroy(test, opt);
	for (fid = 0; fid < opt->nb_filelist; fid++) {
		ml_inference_iomem_destroy(test, opt, fid);
		ml_model_stop(test, opt, &t->model[fid], fid);
		ml_model_unload(test, opt, &t->model[fid], fid);
	}

	t->cmn.result = ML_TEST_FAILED;

	return ret;
}

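/* Report the result recorded by the interleave driver. */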
static int
test_inference_interleave_result(struct ml_test *test, struct ml_options *opt)
{
	struct test_inference *t;

	RTE_SET_USED(opt);

	t = ml_test_priv(test);

	return t->cmn.result;
}

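/* Test ops for the inference interleave test, registered with the ML test framework. */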
static const struct ml_test_ops inference_interleave = {
	.cap_check = test_inference_cap_check,
	.opt_check = test_inference_opt_check,
	.opt_dump = test_inference_opt_dump,
	.test_setup = test_inference_setup,
	.test_destroy = test_inference_destroy,
	.test_driver = test_inference_interleave_driver,
	.test_result = test_inference_interleave_result,
};

ML_TEST_REGISTER(inference_interleave);