xref: /dpdk/app/test/test_cryptodev_crosscheck.c (revision 2c0519b2cfb1f2976cefc69e2035f35512a72f34)
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(C) 2023 Marvell.
 */
#include <rte_cryptodev.h>
#include <rte_malloc.h>

#include "test.h"
#include "test_cryptodev.h"

#define MAX_NB_SESSIONS 1
#define MAX_TEST_STRING_LEN 256

/*
 * The test suite will iterate through the capabilities of each probed cryptodev to identify the
 * common ones. Once the common capabilities are determined, the test suite will generate potential
 * valid inputs and crosscheck (compare) the output results from all cryptodevs.
 */
static struct rte_cryptodev_symmetric_capability *common_symm_capas;
static uint16_t nb_common_sym_caps;

/* Policies of capabilities selection */
enum capability_select_type {
	CAPABILITY_TYPE_MIN,
	CAPABILITY_TYPE_MAX,
	CAPABILITY_TYPE_LAST,
};

static const char * const capability_select_strings[] = {
	[CAPABILITY_TYPE_MIN] = "MIN",
	[CAPABILITY_TYPE_MAX] = "MAX",
};

/* Length of input text to be encrypted */
static size_t input_length[] = { 64, 256, 512 };

/* Calculate number of test cases (combinations) per algorithm */
#define NB_TEST_CASES_PER_ALGO (CAPABILITY_TYPE_LAST * RTE_DIM(input_length))

enum crypto_op_type {
	OP_ENCRYPT,
	OP_DECRYPT,
};

struct crosscheck_test_profile {
	char name[MAX_TEST_STRING_LEN];
	size_t input_buf_len;
	enum rte_crypto_sym_xform_type xform_type;
	int algo;
	uint16_t block_size;
	uint16_t key_size;
	uint16_t iv_size;
	uint16_t digest_size;
	uint16_t aad_size;
	uint32_t dataunit_set;
};

struct meta_test_suite {
	char suite_name[MAX_TEST_STRING_LEN];
	struct crosscheck_test_profile profile[NB_TEST_CASES_PER_ALGO];
};

struct memory_segment {
	uint8_t *mem;
	uint16_t len;
};

struct crosscheck_testsuite_params {
	struct rte_mempool *mbuf_pool;
	struct rte_mempool *op_mpool;
	struct rte_mempool *session_mpool;
	struct rte_cryptodev_config conf;
	struct rte_cryptodev_qp_conf qp_conf;

	uint8_t valid_devs[RTE_CRYPTO_MAX_DEVS];
	uint8_t valid_dev_count;

	struct memory_segment key;
	struct memory_segment digest;
	struct memory_segment aad;
	struct memory_segment iv;

	struct memory_segment expected_digest;
	struct memory_segment expected_aad;
};

static struct crosscheck_testsuite_params testsuite_params;

static const char*
algo_name_get(const struct rte_cryptodev_symmetric_capability *capa)
{
	switch (capa->xform_type) {
	case RTE_CRYPTO_SYM_XFORM_AUTH:
		return rte_cryptodev_get_auth_algo_string(capa->auth.algo);
	case RTE_CRYPTO_SYM_XFORM_CIPHER:
		return rte_cryptodev_get_cipher_algo_string(capa->cipher.algo);
	case RTE_CRYPTO_SYM_XFORM_AEAD:
		return rte_cryptodev_get_aead_algo_string(capa->aead.algo);
	default:
		return NULL;
	}
}

static void
incrementing_generate(uint8_t *dst, uint8_t start, uint16_t size)
{
	int i;

	for (i = 0; i < size; i++)
		dst[i] = start + i;
}

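/* Fill 'input' with 'size' bytes by repeating the given pattern string. */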
static void
pattern_fill(uint8_t *input, const char *pattern, uint16_t size)
{
	size_t pattern_len = strlen(pattern);
	size_t filled_len = 0, to_fill;

	while (filled_len < size) {
		to_fill = RTE_MIN(pattern_len, size - filled_len);
		rte_memcpy(input, pattern, to_fill);
		filled_len += to_fill;
		input += to_fill;
	}
}

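/*
 * Build a test profile from a capability: field sizes are taken from either the minimum or the
 * maximum end of the capability ranges, and the input length is aligned up to the block size.
 */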
static struct crosscheck_test_profile
profile_create(const struct rte_cryptodev_symmetric_capability *capa,
	       enum capability_select_type capability_type, size_t input_len)
{
	struct crosscheck_test_profile profile;

	memset(&profile, 0, sizeof(profile));
	profile.xform_type = capa->xform_type;

	switch (capa->xform_type) {
	case RTE_CRYPTO_SYM_XFORM_AUTH:
		profile.block_size = capa->auth.block_size;
		profile.algo = capa->auth.algo;

		switch (capability_type) {
		case CAPABILITY_TYPE_MIN:
			profile.key_size = capa->auth.key_size.min;
			profile.iv_size = capa->auth.iv_size.min;
			profile.digest_size = capa->auth.digest_size.min;
			profile.aad_size = capa->auth.aad_size.min;
			break;
		case CAPABILITY_TYPE_MAX:
			profile.key_size = capa->auth.key_size.max;
			profile.iv_size = capa->auth.iv_size.max;
			profile.digest_size = capa->auth.digest_size.max;
			profile.aad_size = capa->auth.aad_size.max;
			break;
		default:
			rte_panic("Wrong capability profile type: %i\n", capability_type);
			break;
		}
		break;
	case RTE_CRYPTO_SYM_XFORM_CIPHER:
		profile.block_size = capa->cipher.block_size;
		profile.algo = capa->cipher.algo;
		profile.dataunit_set = capa->cipher.dataunit_set;

		switch (capability_type) {
		case CAPABILITY_TYPE_MIN:
			profile.key_size = capa->cipher.key_size.min;
			profile.iv_size = capa->cipher.iv_size.min;
			break;
		case CAPABILITY_TYPE_MAX:
			profile.key_size = capa->cipher.key_size.max;
			profile.iv_size = capa->cipher.iv_size.max;
			break;
		default:
			rte_panic("Wrong capability profile type: %i\n", capability_type);
			break;
		}
		break;
	case RTE_CRYPTO_SYM_XFORM_AEAD:
		profile.block_size = capa->aead.block_size;
		profile.algo = capa->aead.algo;

		switch (capability_type) {
		case CAPABILITY_TYPE_MIN:
			profile.key_size = capa->aead.key_size.min;
			profile.iv_size = capa->aead.iv_size.min;
			profile.digest_size = capa->aead.digest_size.min;
			profile.aad_size = capa->aead.aad_size.min;
			break;
		case CAPABILITY_TYPE_MAX:
			profile.key_size = capa->aead.key_size.max;
			profile.iv_size = capa->aead.iv_size.max;
			profile.digest_size = capa->aead.digest_size.max;
			profile.aad_size = capa->aead.aad_size.max;
			break;
		default:
			rte_panic("Wrong capability profile type: %i\n", capability_type);
			break;
		}
		break;
	default:
		rte_panic("Wrong xform profile type: %i\n", capa->xform_type);
		break;
	}

	profile.input_buf_len = RTE_ALIGN_CEIL(input_len, profile.block_size);

	snprintf(profile.name, MAX_TEST_STRING_LEN,
			"'%s' - capabilities: '%s', input len: '%zu'",
			algo_name_get(capa), capability_select_strings[capability_type],
			input_len);

	return profile;
}

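/* Shrink 'dst' to the intersection of the 'dst' and 'src' ranges; fail if they do not overlap. */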
static inline int
common_range_set(struct rte_crypto_param_range *dst, const struct rte_crypto_param_range *src)
{
	/* Fail if the ranges do not overlap */
	if ((dst->min > src->max) || (dst->max < src->min))
		return -1;
	dst->min = RTE_MAX(dst->min, src->min);
	dst->max = RTE_MIN(dst->max, src->max);

	return 0;
}

static uint16_t
nb_sym_capabilities_get(const struct rte_cryptodev_capabilities *cap)
{
	uint16_t nb_caps = 0;

	for (; cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
		if (cap->op == RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			nb_caps += 1;
	}

	return nb_caps;
}

static struct rte_cryptodev_sym_capability_idx
sym_capability_to_idx(const struct rte_cryptodev_symmetric_capability *cap)
{
	struct rte_cryptodev_sym_capability_idx cap_idx;

	cap_idx.type = cap->xform_type;
	switch (cap_idx.type) {
	case RTE_CRYPTO_SYM_XFORM_CIPHER:
		cap_idx.algo.cipher = cap->cipher.algo;
		break;
	case RTE_CRYPTO_SYM_XFORM_AUTH:
		cap_idx.algo.auth = cap->auth.algo;
		break;
	case RTE_CRYPTO_SYM_XFORM_AEAD:
		cap_idx.algo.aead = cap->aead.algo;
		break;
	default:
		rte_panic("Wrong capability profile type: %i\n", cap_idx.type);
		break;
	}

	return cap_idx;
}

/* Set the biggest common range for all capability fields */
static int
common_capability_set(struct rte_cryptodev_symmetric_capability *dst,
		       const struct rte_cryptodev_symmetric_capability *src)
{
	switch (src->xform_type) {
	case RTE_CRYPTO_SYM_XFORM_AUTH:
		if (dst->auth.algo != src->auth.algo)
			return -ENOENT;
		if (dst->auth.block_size != src->auth.block_size)
			return -ENOENT;
		if (common_range_set(&dst->auth.key_size, &src->auth.key_size))
			return -ENOENT;
		if (common_range_set(&dst->auth.digest_size, &src->auth.digest_size))
			return -ENOENT;
		if (common_range_set(&dst->auth.aad_size, &src->auth.aad_size))
			return -ENOENT;
		if (common_range_set(&dst->auth.iv_size, &src->auth.iv_size))
			return -ENOENT;
		break;
	case RTE_CRYPTO_SYM_XFORM_CIPHER:
		if (dst->cipher.algo != src->cipher.algo)
			return -ENOENT;
		if (dst->cipher.block_size != src->cipher.block_size)
			return -ENOENT;
		if (common_range_set(&dst->cipher.key_size, &src->cipher.key_size))
			return -ENOENT;
		if (common_range_set(&dst->cipher.iv_size, &src->cipher.iv_size))
			return -ENOENT;
		if (dst->cipher.dataunit_set != src->cipher.dataunit_set)
			return -ENOENT;
		break;
	case RTE_CRYPTO_SYM_XFORM_AEAD:
		if (dst->aead.algo != src->aead.algo)
			return -ENOENT;
		if (dst->aead.block_size != src->aead.block_size)
			return -ENOENT;
		if (common_range_set(&dst->aead.key_size, &src->aead.key_size))
			return -ENOENT;
		if (common_range_set(&dst->aead.digest_size, &src->aead.digest_size))
			return -ENOENT;
		if (common_range_set(&dst->aead.aad_size, &src->aead.aad_size))
			return -ENOENT;
		if (common_range_set(&dst->aead.iv_size, &src->aead.iv_size))
			return -ENOENT;
		break;
	default:
		RTE_LOG(ERR, USER1, "Unsupported xform_type!\n");
		return -ENOENT;
	}

	return 0;
}

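/*
 * Build the list of capabilities supported by all valid devices: start from the capabilities of
 * the first device and keep only those for which every other device reports a common range.
 */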
static int
capabilities_inspect(void)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	const struct rte_cryptodev_symmetric_capability *next_dev_cap;
	struct rte_cryptodev_symmetric_capability common_cap;
	struct rte_cryptodev_sym_capability_idx cap_idx;
	const struct rte_cryptodev_capabilities *cap;
	struct rte_cryptodev_info dev_info;
	uint16_t nb_caps, cap_i = 0;
	uint8_t cdev_id, i;

	/* Get list of capabilities of first device */
	cdev_id = ts_params->valid_devs[0];
	rte_cryptodev_info_get(cdev_id, &dev_info);
	cap = dev_info.capabilities;
	nb_caps = nb_sym_capabilities_get(cap);
	common_symm_capas = rte_calloc(NULL, nb_caps,
				       sizeof(struct rte_cryptodev_symmetric_capability), 0);
	if (common_symm_capas == NULL)
		return -ENOMEM;

	for (; cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++) {
		/* Skip non-symmetric capabilities */
		if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
			continue;
		/* AES-CCM requires special handling due to API requirements, skip it for now */
		if (cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AEAD &&
				cap->sym.aead.algo == RTE_CRYPTO_AEAD_AES_CCM)
			continue;

		cap_idx = sym_capability_to_idx(&cap->sym);
		common_cap = cap->sym;
		for (i = 1; i < ts_params->valid_dev_count; i++) {
			cdev_id = ts_params->valid_devs[i];
			next_dev_cap = rte_cryptodev_sym_capability_get(cdev_id, &cap_idx);
			/* Capability not supported by one of the devices, skip it */
			if (next_dev_cap == NULL)
				goto skip;
			/* Check if capabilities have a common range of values */
			if (common_capability_set(&common_cap, next_dev_cap) != 0)
				goto skip;
		}

		/* If a capability reaches this point, it is supported by all cryptodevs */
		common_symm_capas[cap_i++] = common_cap;
skip:;
	}
	nb_common_sym_caps = cap_i;

	return 0;
}

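/*
 * Testsuite initialization: collect valid cryptodevs, create mbuf/op/session pools, inspect the
 * common capabilities and preallocate key, digest, AAD and IV buffers sized for the largest ones.
 */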
static int
crosscheck_init(void)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	const struct rte_cryptodev_symmetric_capability *cap;
	const uint32_t nb_queue_pairs = 1;
	struct rte_cryptodev_info info;
	uint32_t session_priv_size = 0;
	uint32_t nb_devs, dev_id;
	uint8_t i;

	memset(ts_params, 0, sizeof(*ts_params));

	/* Create list of valid crypto devs */
	nb_devs = rte_cryptodev_count();
	for (dev_id = 0; dev_id < nb_devs; dev_id++) {
		rte_cryptodev_info_get(dev_id, &info);

		if (info.sym.max_nb_sessions != 0 && info.sym.max_nb_sessions < MAX_NB_SESSIONS)
			continue;
		if (info.max_nb_queue_pairs < nb_queue_pairs)
			continue;
		ts_params->valid_devs[ts_params->valid_dev_count++] = dev_id;
		/* Obtain configuration parameters that will satisfy all cryptodevs */
		session_priv_size = RTE_MAX(session_priv_size,
					    rte_cryptodev_sym_get_private_session_size(dev_id));
	}

	if (ts_params->valid_dev_count < 2) {
		RTE_LOG(WARNING, USER1, "Min number of cryptodevs for test is 2, found (%d)\n",
			ts_params->valid_dev_count);
		return TEST_SKIPPED;
	}

	/* Create pools for mbufs, crypto operations and sessions */
	ts_params->mbuf_pool = rte_pktmbuf_pool_create("CRYPTO_MBUFPOOL", NUM_MBUFS,
			MBUF_CACHE_SIZE, 0, MBUF_SIZE, rte_socket_id());
	if (ts_params->mbuf_pool == NULL) {
		RTE_LOG(ERR, USER1, "Can't create CRYPTO_MBUFPOOL\n");
		return TEST_FAILED;
	}

	ts_params->op_mpool = rte_crypto_op_pool_create("MBUF_CRYPTO_SYM_OP_POOL",
			RTE_CRYPTO_OP_TYPE_SYMMETRIC, NUM_MBUFS, MBUF_CACHE_SIZE,
			DEFAULT_NUM_XFORMS * sizeof(struct rte_crypto_sym_xform) +
			MAXIMUM_IV_LENGTH, rte_socket_id());

	if (ts_params->op_mpool == NULL) {
		RTE_LOG(ERR, USER1, "Can't create MBUF_CRYPTO_SYM_OP_POOL\n");
		return TEST_FAILED;
	}

	ts_params->session_mpool = rte_cryptodev_sym_session_pool_create("test_sess_mp",
			MAX_NB_SESSIONS, session_priv_size, 0, 0, SOCKET_ID_ANY);
	TEST_ASSERT_NOT_NULL(ts_params->session_mpool, "session mempool allocation failed");

	/* Setup queue pair conf params */
	ts_params->conf.nb_queue_pairs = nb_queue_pairs;
	ts_params->conf.socket_id = SOCKET_ID_ANY;
	ts_params->conf.ff_disable = RTE_CRYPTODEV_FF_SECURITY;
	ts_params->qp_conf.nb_descriptors = MAX_NUM_OPS_INFLIGHT;
	ts_params->qp_conf.mp_session = ts_params->session_mpool;

	if (capabilities_inspect() != 0)
		return TEST_FAILED;

	/* Allocate memory based on max supported capabilities */
	for (i = 0; i < nb_common_sym_caps; i++) {
		cap = &common_symm_capas[i];
		switch (cap->xform_type) {
		case RTE_CRYPTO_SYM_XFORM_AUTH:
			ts_params->key.len = RTE_MAX(ts_params->key.len, cap->auth.key_size.max);
			ts_params->digest.len = RTE_MAX(ts_params->digest.len,
							cap->auth.digest_size.max);
			ts_params->aad.len = RTE_MAX(ts_params->aad.len, cap->auth.aad_size.max);
			ts_params->iv.len = RTE_MAX(ts_params->iv.len, cap->auth.iv_size.max);
			break;
		case RTE_CRYPTO_SYM_XFORM_CIPHER:
			ts_params->key.len = RTE_MAX(ts_params->key.len, cap->cipher.key_size.max);
			ts_params->iv.len = RTE_MAX(ts_params->iv.len, cap->cipher.iv_size.max);
			break;
		case RTE_CRYPTO_SYM_XFORM_AEAD:
			ts_params->key.len = RTE_MAX(ts_params->key.len, cap->aead.key_size.max);
			ts_params->digest.len = RTE_MAX(ts_params->digest.len,
							cap->aead.digest_size.max);
			ts_params->aad.len = RTE_MAX(ts_params->aad.len, cap->aead.aad_size.max);
			ts_params->iv.len = RTE_MAX(ts_params->iv.len, cap->aead.iv_size.max);
			break;
		default:
			rte_panic("Wrong capability profile type: %i\n", cap->xform_type);
			break;
		}
	}

	if (ts_params->key.len) {
		ts_params->key.mem = rte_zmalloc(NULL, ts_params->key.len, 0);
		TEST_ASSERT_NOT_NULL(ts_params->key.mem, "Key mem allocation failed\n");
		pattern_fill(ts_params->key.mem, "*Secret key*", ts_params->key.len);
	}
	if (ts_params->digest.len) {
		ts_params->digest.mem = rte_zmalloc(NULL, ts_params->digest.len, 16);
		TEST_ASSERT_NOT_NULL(ts_params->digest.mem, "digest mem allocation failed\n");
		ts_params->expected_digest.len = ts_params->digest.len;
		ts_params->expected_digest.mem = rte_zmalloc(NULL, ts_params->digest.len, 0);
		TEST_ASSERT_NOT_NULL(ts_params->expected_digest.mem,
				     "Expected digest allocation failed\n");
	}
	if (ts_params->aad.len) {
		ts_params->aad.mem = rte_zmalloc(NULL, ts_params->aad.len, 16);
		TEST_ASSERT_NOT_NULL(ts_params->aad.mem, "aad mem allocation failed\n");
		ts_params->expected_aad.len = ts_params->aad.len;
		ts_params->expected_aad.mem = rte_zmalloc(NULL, ts_params->expected_aad.len, 0);
		TEST_ASSERT_NOT_NULL(ts_params->expected_aad.mem,
				     "Expected aad allocation failed\n");
	}
	if (ts_params->iv.len) {
		ts_params->iv.mem = rte_zmalloc(NULL, ts_params->iv.len, 0);
		TEST_ASSERT_NOT_NULL(ts_params->iv.mem, "iv mem allocation failed\n");
		pattern_fill(ts_params->iv.mem, "IV", ts_params->iv.len);
	}

	return TEST_SUCCESS;
}

static void
crosscheck_fini(void)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;

	rte_mempool_free(ts_params->mbuf_pool);
	rte_mempool_free(ts_params->op_mpool);
	rte_mempool_free(ts_params->session_mpool);
	rte_free(ts_params->key.mem);
	rte_free(ts_params->digest.mem);
	rte_free(ts_params->aad.mem);
	rte_free(ts_params->iv.mem);
}

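/* Configure all valid cryptodevs with common parameters, set up their queue pairs and start them. */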
static int
dev_configure_and_start(uint64_t ff_disable)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	uint8_t i, dev_id;
	uint16_t qp_id;

	/* Reconfigure device to default parameters */
	ts_params->conf.ff_disable = ff_disable;

	/* Configure cryptodevs */
	for (i = 0; i < ts_params->valid_dev_count; i++) {
		dev_id = ts_params->valid_devs[i];
		TEST_ASSERT_SUCCESS(rte_cryptodev_configure(dev_id, &ts_params->conf),
				    "Failed to configure cryptodev %u with %u qps",
				    dev_id, ts_params->conf.nb_queue_pairs);

		for (qp_id = 0; qp_id < ts_params->conf.nb_queue_pairs; qp_id++) {
			TEST_ASSERT_SUCCESS(rte_cryptodev_queue_pair_setup(
				dev_id, qp_id, &ts_params->qp_conf,
				rte_cryptodev_socket_id(dev_id)),
				"Failed to setup queue pair %u on cryptodev %u",
				qp_id, dev_id);
		}
		rte_cryptodev_stats_reset(dev_id);

		/* Start the device */
		TEST_ASSERT_SUCCESS(rte_cryptodev_start(dev_id), "Failed to start cryptodev %u",
				    dev_id);
	}

	return TEST_SUCCESS;
}

static int
crosscheck_suite_setup(void)
{
	dev_configure_and_start(RTE_CRYPTODEV_FF_SECURITY);

	return 0;
}

static void
crosscheck_suite_teardown(void)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	uint8_t i, dev_id;

	for (i = 0; i < ts_params->valid_dev_count; i++) {
		dev_id = ts_params->valid_devs[i];
		rte_cryptodev_stop(dev_id);
	}
}

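/* Enqueue a single crypto op on queue pair 0 and busy-poll until its result is dequeued. */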
static struct rte_crypto_op *
crypto_request_process(uint8_t dev_id, struct rte_crypto_op *op)
{
	struct rte_crypto_op *res = NULL;

	if (rte_cryptodev_enqueue_burst(dev_id, 0, &op, 1) != 1) {
		RTE_LOG(ERR, USER1, "Error sending packet for processing\n");
		return NULL;
	}

	while (rte_cryptodev_dequeue_burst(dev_id, 0, &res, 1) == 0)
		rte_pause();

	if (res->status != RTE_CRYPTO_OP_STATUS_SUCCESS) {
		RTE_LOG(ERR, USER1, "Operation status %d\n", res->status);
		return NULL;
	}

	if (res != op) {
		RTE_LOG(ERR, USER1, "Unexpected operation received!\n");
		rte_crypto_op_free(res);
		return NULL;
	}

	return res;
}

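/*
 * Create a symmetric session on the given device with a single auth, cipher or AEAD transform
 * built from the test profile; the transform direction is selected by 'op_type'.
 */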
static struct rte_cryptodev_sym_session*
session_create(const struct crosscheck_test_profile *profile, uint8_t dev_id,
	       enum crypto_op_type op_type)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	struct rte_cryptodev_sym_session *session;
	struct rte_crypto_sym_xform xform;

	memset(&xform, 0, sizeof(xform));

	switch (profile->xform_type) {
	case RTE_CRYPTO_SYM_XFORM_AUTH:
		xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		xform.next = NULL;
		xform.auth.algo = profile->algo;
		xform.auth.op = op_type == OP_ENCRYPT ? RTE_CRYPTO_AUTH_OP_GENERATE :
			RTE_CRYPTO_AUTH_OP_VERIFY;
		xform.auth.digest_length = profile->digest_size;
		xform.auth.key.length = profile->key_size;
		xform.auth.key.data = ts_params->key.mem;
		xform.auth.iv.length = profile->iv_size;
		xform.auth.iv.offset = IV_OFFSET;
		break;
	case RTE_CRYPTO_SYM_XFORM_CIPHER:
		xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		xform.next = NULL;
		xform.cipher.algo = profile->algo;
		xform.cipher.op = op_type == OP_ENCRYPT ? RTE_CRYPTO_CIPHER_OP_ENCRYPT :
			RTE_CRYPTO_CIPHER_OP_DECRYPT;
		xform.cipher.key.length = profile->key_size;
		xform.cipher.key.data = ts_params->key.mem;
		xform.cipher.iv.length = profile->iv_size;
		xform.cipher.iv.offset = IV_OFFSET;
		break;
	case RTE_CRYPTO_SYM_XFORM_AEAD:
		xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		xform.next = NULL;
		xform.aead.algo = profile->algo;
		xform.aead.op = op_type == OP_ENCRYPT ? RTE_CRYPTO_AEAD_OP_ENCRYPT :
			RTE_CRYPTO_AEAD_OP_DECRYPT;
		xform.aead.digest_length = profile->digest_size;
		xform.aead.key.length = profile->key_size;
		xform.aead.key.data = ts_params->key.mem;
		xform.aead.iv.length = profile->iv_size;
		xform.aead.iv.offset = IV_OFFSET;
		xform.aead.aad_length = profile->aad_size;
		break;
	default:
		return NULL;
	}

	session = rte_cryptodev_sym_session_create(dev_id, &xform, testsuite_params.session_mpool);

	return session;
}

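/* Allocate a packet mbuf, zero its data area and append a copy of the input buffer. */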
static struct rte_mbuf*
mbuf_create(const uint8_t *input_buf, uint16_t input_len)
{
	struct rte_mbuf *pkt;
	uint8_t *pkt_data;

	pkt = rte_pktmbuf_alloc(testsuite_params.mbuf_pool);
	if (pkt == NULL) {
		RTE_LOG(ERR, USER1, "Failed to allocate input buffer in mempool");
		return NULL;
	}

	/* zeroing tailroom */
	memset(rte_pktmbuf_mtod(pkt, uint8_t *), 0, rte_pktmbuf_tailroom(pkt));

	pkt_data = (uint8_t *)rte_pktmbuf_append(pkt, input_len);
	if (pkt_data == NULL) {
		RTE_LOG(ERR, USER1, "no room to append data, len: %d", input_len);
		goto error;
	}
	rte_memcpy(pkt_data, input_buf, input_len);

	return pkt;
error:
	rte_pktmbuf_free(pkt);
	return NULL;
}

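/*
 * Allocate a symmetric crypto op for the given mbuf and fill the xform-specific fields (IV,
 * digest, AAD and data length) using the preallocated test buffers and the profile sizes.
 */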
static struct rte_crypto_op*
operation_create(const struct crosscheck_test_profile *profile,
		 struct rte_mbuf *ibuf, enum crypto_op_type op_type)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	uint8_t *digest_data = NULL, *aad_data = NULL, *iv_ptr = NULL;
	uint16_t aad_size, digest_size, plaintext_len;
	struct rte_crypto_sym_op *sym_op;
	struct rte_crypto_op *op;

	op = rte_crypto_op_alloc(ts_params->op_mpool, RTE_CRYPTO_OP_TYPE_SYMMETRIC);
	if (op == NULL) {
		RTE_LOG(ERR, USER1, "Failed to allocate symmetric crypto operation struct");
		return NULL;
	}

	plaintext_len = profile->input_buf_len;
	aad_size = profile->aad_size;
	digest_size = profile->digest_size;

	if (aad_size) {
		aad_data = ts_params->aad.mem;
		if (op_type == OP_ENCRYPT)
			pattern_fill(aad_data, "This is an aad.", aad_size);
	}

	if (digest_size) {
		digest_data = ts_params->digest.mem;
		if (op_type == OP_ENCRYPT)
			memset(digest_data, 0, digest_size);
	}

	sym_op = op->sym;
	memset(sym_op, 0, sizeof(*sym_op));

	iv_ptr = rte_crypto_op_ctod_offset(op, uint8_t *, IV_OFFSET);
	rte_memcpy(iv_ptr, ts_params->iv.mem, profile->iv_size);

	switch (profile->xform_type) {
	case RTE_CRYPTO_SYM_XFORM_AUTH:
		sym_op->auth.digest.data = digest_data;
		sym_op->auth.digest.phys_addr = rte_malloc_virt2iova(sym_op->auth.digest.data);
		sym_op->auth.data.length = plaintext_len;
		break;
	case RTE_CRYPTO_SYM_XFORM_CIPHER:
		sym_op->cipher.data.length = plaintext_len;
		break;
	case RTE_CRYPTO_SYM_XFORM_AEAD:
		sym_op->aead.aad.data = aad_data;
		sym_op->aead.aad.phys_addr = rte_malloc_virt2iova(sym_op->aead.aad.data);
		sym_op->aead.digest.data = digest_data;
		sym_op->aead.digest.phys_addr = rte_malloc_virt2iova(sym_op->aead.digest.data);
		sym_op->aead.data.offset = 0;
		sym_op->aead.data.length = plaintext_len;
		break;
	default:
		goto error;
	}

	sym_op->m_src = ibuf;

	return op;

error:
	rte_crypto_op_free(op);
	return NULL;
}

static void
mbuf_to_buf_copy(const struct rte_mbuf *m, uint8_t *res_buf, uint16_t *len)
{
	const uint8_t *out;

	*len = m->pkt_len;
	out = rte_pktmbuf_read(m, 0, *len, res_buf);
	/* Single segment buffer */
	if (out != res_buf)
		memcpy(res_buf, out, *len);
}

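/*
 * Process one request on a single device: create a session, wrap the input in an mbuf, build the
 * crypto op, run it and copy the resulting packet data into 'output_buf'.
 */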
static int
single_dev_process(const struct crosscheck_test_profile *profile, uint16_t dev_id, enum
		   crypto_op_type op_type, const uint8_t *input_buf, uint16_t input_len,
		   uint8_t *output_buf, uint16_t *output_len)
{
	struct rte_cryptodev_sym_session *session = NULL;
	struct rte_mbuf *ibuf = NULL, *obuf = NULL;
	struct rte_crypto_op *op = NULL;
	int ret = -1;

	session = session_create(profile, dev_id, op_type);
	if (session == NULL)
		goto error;

	ibuf = mbuf_create(input_buf, input_len);
	if (ibuf == NULL)
		goto error;

	op = operation_create(profile, ibuf, op_type);
	if (op == NULL)
		goto error;

	debug_hexdump(stdout, "Input:", rte_pktmbuf_mtod(ibuf, uint8_t*), ibuf->pkt_len);

	rte_crypto_op_attach_sym_session(op, session);

	struct rte_crypto_op *res = crypto_request_process(dev_id, op);
	if (res == NULL)
		goto error;

	obuf = op->sym->m_src;
	if (obuf == NULL) {
		RTE_LOG(ERR, USER1, "Invalid packet received\n");
		goto error;
	}
	mbuf_to_buf_copy(obuf, output_buf, output_len);

	ret = 0;

error:
	if (session != NULL) {
		int sret;
		sret = rte_cryptodev_sym_session_free(dev_id, session);
		RTE_VERIFY(sret == 0);
	}
	rte_pktmbuf_free(ibuf);
	rte_crypto_op_free(op);
	return ret;
}

static int
buffers_compare(const uint8_t *expected, uint16_t expected_len,
		const uint8_t *received, uint16_t received_len)
{
	TEST_ASSERT_EQUAL(expected_len, received_len, "Length mismatch %d != %d !\n",
			  expected_len, received_len);

	if (memcmp(expected, received, expected_len)) {
		rte_hexdump(rte_log_get_stream(), "expected", expected, expected_len);
		rte_hexdump(rte_log_get_stream(), "received", received, expected_len);
		return TEST_FAILED;
	}

	return TEST_SUCCESS;
}

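/*
 * Run the same request on every valid device. The output of the first device is taken as the
 * reference; the text, digest and AAD produced by the remaining devices must match it.
 */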
static int
crosscheck_all_devices(const struct crosscheck_test_profile *profile, enum crypto_op_type op_type,
		       const uint8_t *input_text, uint16_t input_len, uint8_t *output_text,
		       uint16_t *output_len)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	uint16_t len = 0, expected_len = 0;
	uint8_t expected_text[MBUF_SIZE];
	uint8_t i, dev_id;
	int status;

	for (i = 0; i < ts_params->valid_dev_count; i++) {
		dev_id = ts_params->valid_devs[i];
		status = single_dev_process(profile, dev_id, op_type, input_text, input_len,
					    output_text, &len);
		TEST_ASSERT_SUCCESS(status, "Error occurred during processing");

		if (i == 0) {
			/* First device, copy data for future comparisons */
			memcpy(expected_text, output_text, len);
			memcpy(ts_params->expected_digest.mem, ts_params->digest.mem,
			       profile->digest_size);
			memcpy(ts_params->expected_aad.mem, ts_params->aad.mem, profile->aad_size);
			expected_len = len;
		} else {
			/* Compare output against expected (first) output */
			TEST_ASSERT_SUCCESS(buffers_compare(expected_text, expected_len,
					output_text, len),
					"Text mismatch occurred on dev %i\n", dev_id);
			TEST_ASSERT_SUCCESS(buffers_compare(ts_params->expected_digest.mem,
					profile->digest_size, ts_params->digest.mem,
					profile->digest_size),
					"Digest mismatch occurred on dev %i\n", dev_id);
			TEST_ASSERT_SUCCESS(buffers_compare(ts_params->expected_aad.mem,
					profile->aad_size, ts_params->aad.mem, profile->aad_size),
					"AAD mismatch occurred on dev %i\n", dev_id);
		}

		RTE_LOG(DEBUG, USER1, "DEV ID: %u finished processing\n", dev_id);
		debug_hexdump(stdout, "Output: ", output_text, len);
		if (profile->digest_size)
			debug_hexdump(stdout, "Digest: ", ts_params->digest.mem,
				      profile->digest_size);
	}

	*output_len = len;

	return TEST_SUCCESS;
}

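/*
 * Run a request that is expected to fail (e.g. after digest corruption) on every valid device and
 * verify that none of them reports success.
 */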
static int
check_negative_all_devices(const struct crosscheck_test_profile *profile,
			   enum crypto_op_type op_type, const uint8_t *input_text,
			   uint16_t input_len)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;

	uint8_t output_text[MBUF_SIZE];
	uint8_t i, dev_id;
	uint16_t len;
	int status;

	for (i = 0; i < ts_params->valid_dev_count; i++) {
		dev_id = ts_params->valid_devs[i];
		status = single_dev_process(profile, dev_id, op_type, input_text, input_len,
					    output_text, &len);
		TEST_ASSERT_FAIL(status, "Negative case unexpectedly succeeded");

	}

	return TEST_SUCCESS;
}

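/*
 * Full crosscheck for a single profile: encrypt an incrementing-pattern input on all devices,
 * decrypt the ciphertext on all devices, then corrupt the digest (if any) and check that
 * verification fails everywhere.
 */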
static int
crosscheck_with_profile_run(const struct crosscheck_test_profile *profile)
{
	struct crosscheck_testsuite_params *ts_params = &testsuite_params;
	uint8_t input_text[profile->input_buf_len];
	uint16_t output_len, encrypted_len;
	uint8_t encrypted_text[MBUF_SIZE];
	uint8_t output_text[MBUF_SIZE];
	int status;

	memset(ts_params->digest.mem, 0, ts_params->digest.len);
	memset(ts_params->aad.mem, 0, ts_params->aad.len);

	/* Encrypt Stage */
	RTE_LOG(DEBUG, USER1, "Executing encrypt stage\n");
	/* Fill input with incrementing pattern */
	incrementing_generate(input_text, 'a', profile->input_buf_len);
	status = crosscheck_all_devices(profile, OP_ENCRYPT, input_text, profile->input_buf_len,
					output_text, &output_len);
	TEST_ASSERT_SUCCESS(status, "Error occurred during encryption");

	/* Decrypt Stage */
	RTE_LOG(DEBUG, USER1, "Executing decrypt stage\n");
	/* Set up encrypted data as input */
	encrypted_len = output_len;
	memcpy(encrypted_text, output_text, output_len);
	status = crosscheck_all_devices(profile, OP_DECRYPT, encrypted_text, encrypted_len,
					output_text, &output_len);
	TEST_ASSERT_SUCCESS(status, "Error occurred during decryption");

	/* Negative Stage */
	RTE_LOG(DEBUG, USER1, "Executing negative stage\n");
	if (profile->digest_size) {
		/* Corrupting one byte of digest */
		ts_params->digest.mem[profile->digest_size - 1] += 1;
		status = check_negative_all_devices(profile, OP_DECRYPT, encrypted_text,
						    encrypted_len);
930*2c0519b2SVolodymyr Fialko 		TEST_ASSERT_SUCCESS(status, "Error occurred during decryption");
931*2c0519b2SVolodymyr Fialko 		TEST_ASSERT_SUCCESS(status, "Corrupted digest was not rejected during decryption");
932*2c0519b2SVolodymyr Fialko 
933*2c0519b2SVolodymyr Fialko 
935*2c0519b2SVolodymyr Fialko }
936*2c0519b2SVolodymyr Fialko 
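/* Test case callback: profiles with an unspecified transform type are skipped. */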
937*2c0519b2SVolodymyr Fialko static int
938*2c0519b2SVolodymyr Fialko test_crosscheck_unit(const void *ptr)
939*2c0519b2SVolodymyr Fialko {
940*2c0519b2SVolodymyr Fialko 	const struct crosscheck_test_profile *profile = ptr;
941*2c0519b2SVolodymyr Fialko 
942*2c0519b2SVolodymyr Fialko 	if (profile->xform_type == RTE_CRYPTO_SYM_XFORM_NOT_SPECIFIED)
943*2c0519b2SVolodymyr Fialko 		return TEST_SKIPPED;
944*2c0519b2SVolodymyr Fialko 
945*2c0519b2SVolodymyr Fialko 	return crosscheck_with_profile_run(profile);
946*2c0519b2SVolodymyr Fialko }
947*2c0519b2SVolodymyr Fialko 
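/*
 * Build a per-algorithm sub-suite: a single allocation holds the suite, its
 * test case array (plus the terminating empty entry) and the meta data with
 * the generated profiles. One test case is created for every combination of
 * capability selection policy and input length.
 */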
948*2c0519b2SVolodymyr Fialko static struct unit_test_suite*
949*2c0519b2SVolodymyr Fialko sym_unit_test_suite_create(const struct rte_cryptodev_symmetric_capability *capa)
950*2c0519b2SVolodymyr Fialko {
951*2c0519b2SVolodymyr Fialko 	size_t uts_size, total_size, input_sz;
952*2c0519b2SVolodymyr Fialko 	struct meta_test_suite *meta_ts;
953*2c0519b2SVolodymyr Fialko 	const char *suite_prefix = NULL;
954*2c0519b2SVolodymyr Fialko 	const char *algo_name = NULL;
955*2c0519b2SVolodymyr Fialko 	struct unit_test_suite *uts;
956*2c0519b2SVolodymyr Fialko 	uint64_t test_case_idx = 0;
957*2c0519b2SVolodymyr Fialko 	struct unit_test_case *utc;
958*2c0519b2SVolodymyr Fialko 	int cap_type;
959*2c0519b2SVolodymyr Fialko 	char *mem;
960*2c0519b2SVolodymyr Fialko 
961*2c0519b2SVolodymyr Fialko 	const char * const suite_prefix_strings[] = {
962*2c0519b2SVolodymyr Fialko 		[RTE_CRYPTO_SYM_XFORM_AUTH] = "Algo AUTH ",
963*2c0519b2SVolodymyr Fialko 		[RTE_CRYPTO_SYM_XFORM_CIPHER] = "Algo CIPHER ",
964*2c0519b2SVolodymyr Fialko 		[RTE_CRYPTO_SYM_XFORM_AEAD] = "Algo AEAD ",
965*2c0519b2SVolodymyr Fialko 	};
966*2c0519b2SVolodymyr Fialko 
967*2c0519b2SVolodymyr Fialko 	suite_prefix = suite_prefix_strings[capa->xform_type];
968*2c0519b2SVolodymyr Fialko 	algo_name = algo_name_get(capa);
969*2c0519b2SVolodymyr Fialko 
970*2c0519b2SVolodymyr Fialko 	/* Calculate size for test suite with all test cases +1 NULL case */
971*2c0519b2SVolodymyr Fialko 	uts_size = sizeof(struct unit_test_suite) +
972*2c0519b2SVolodymyr Fialko 		(NB_TEST_CASES_PER_ALGO + 1) * sizeof(struct unit_test_case);
973*2c0519b2SVolodymyr Fialko 
974*2c0519b2SVolodymyr Fialko 	/* Also allocate memory for suite meta data */
975*2c0519b2SVolodymyr Fialko 	total_size = uts_size + sizeof(struct meta_test_suite);
976*2c0519b2SVolodymyr Fialko 	mem = rte_zmalloc(NULL, total_size, 0);
977*2c0519b2SVolodymyr Fialko 	if (mem == NULL)
978*2c0519b2SVolodymyr Fialko 		return NULL;
979*2c0519b2SVolodymyr Fialko 	uts = (struct unit_test_suite *) mem;
980*2c0519b2SVolodymyr Fialko 	meta_ts = (struct meta_test_suite *) (mem + uts_size);
981*2c0519b2SVolodymyr Fialko 
982*2c0519b2SVolodymyr Fialko 	/* Initialize test suite */
983*2c0519b2SVolodymyr Fialko 	snprintf(meta_ts->suite_name, MAX_TEST_STRING_LEN, "%s '%s'", suite_prefix, algo_name);
984*2c0519b2SVolodymyr Fialko 	uts->suite_name = meta_ts->suite_name;
985*2c0519b2SVolodymyr Fialko 
986*2c0519b2SVolodymyr Fialko 	/* Initialize test cases */
987*2c0519b2SVolodymyr Fialko 	for (cap_type = 0; cap_type < CAPABILITY_TYPE_LAST; cap_type++) {
988*2c0519b2SVolodymyr Fialko 		for (input_sz = 0; input_sz < RTE_DIM(input_length); input_sz++) {
989*2c0519b2SVolodymyr Fialko 			meta_ts->profile[test_case_idx] = profile_create(
990*2c0519b2SVolodymyr Fialko 					capa, cap_type, input_length[input_sz]);
991*2c0519b2SVolodymyr Fialko 			utc = &uts->unit_test_cases[test_case_idx];
992*2c0519b2SVolodymyr Fialko 			utc->name = meta_ts->profile[test_case_idx].name;
993*2c0519b2SVolodymyr Fialko 			utc->data = (const void *) &meta_ts->profile[test_case_idx];
994*2c0519b2SVolodymyr Fialko 			utc->testcase_with_data = test_crosscheck_unit;
995*2c0519b2SVolodymyr Fialko 			utc->enabled = true;
996*2c0519b2SVolodymyr Fialko 
997*2c0519b2SVolodymyr Fialko 			test_case_idx += 1;
998*2c0519b2SVolodymyr Fialko 			RTE_VERIFY(test_case_idx <= NB_TEST_CASES_PER_ALGO);
999*2c0519b2SVolodymyr Fialko 		}
1000*2c0519b2SVolodymyr Fialko 	}
1001*2c0519b2SVolodymyr Fialko 
1002*2c0519b2SVolodymyr Fialko 	return uts;
1003*2c0519b2SVolodymyr Fialko }
1004*2c0519b2SVolodymyr Fialko 
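/*
 * Test suite entry point: initialize the crosscheck environment, build one
 * sub-suite per common symmetric capability, run them under a parent suite
 * and free the sub-suites afterwards.
 */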
1005*2c0519b2SVolodymyr Fialko static int
1006*2c0519b2SVolodymyr Fialko test_crosscheck(void)
1007*2c0519b2SVolodymyr Fialko {
1008*2c0519b2SVolodymyr Fialko 	struct unit_test_suite **test_suites = NULL;
1009*2c0519b2SVolodymyr Fialko 	int ret, i;
1010*2c0519b2SVolodymyr Fialko 
1011*2c0519b2SVolodymyr Fialko 	static struct unit_test_suite ts = {
1012*2c0519b2SVolodymyr Fialko 		.suite_name = "Crosscheck Unit Test Suite",
1013*2c0519b2SVolodymyr Fialko 		.setup = crosscheck_suite_setup,
1014*2c0519b2SVolodymyr Fialko 		.teardown = crosscheck_suite_teardown,
1015*2c0519b2SVolodymyr Fialko 		.unit_test_cases = {TEST_CASES_END()}
1016*2c0519b2SVolodymyr Fialko 	};
1017*2c0519b2SVolodymyr Fialko 
1018*2c0519b2SVolodymyr Fialko 	ret = crosscheck_init();
1019*2c0519b2SVolodymyr Fialko 	if (ret)
1020*2c0519b2SVolodymyr Fialko 		goto exit;
1021*2c0519b2SVolodymyr Fialko 
1022*2c0519b2SVolodymyr Fialko 	if (nb_common_sym_caps == 0) {
1023*2c0519b2SVolodymyr Fialko 		RTE_LOG(WARNING, USER1, "Cryptodevs don't have common capabilities\n");
1024*2c0519b2SVolodymyr Fialko 		ret = TEST_SKIPPED;
1025*2c0519b2SVolodymyr Fialko 		goto exit;
1026*2c0519b2SVolodymyr Fialko 	}
1027*2c0519b2SVolodymyr Fialko 
1028*2c0519b2SVolodymyr Fialko 	/* + 1 for NULL-end suite */
1029*2c0519b2SVolodymyr Fialko 	test_suites = rte_calloc(NULL, nb_common_sym_caps + 1, sizeof(struct unit_test_suite *), 0);
1030*2c0519b2SVolodymyr Fialko 	TEST_ASSERT_NOT_NULL(test_suites, "test_suites allocation failed");
1031*2c0519b2SVolodymyr Fialko 
1032*2c0519b2SVolodymyr Fialko 	/* Create test suite for each supported algorithm */
1033*2c0519b2SVolodymyr Fialko 	ts.unit_test_suites = test_suites;
1034*2c0519b2SVolodymyr Fialko 	for (i = 0; i < nb_common_sym_caps; i++)
1035*2c0519b2SVolodymyr Fialko 		ts.unit_test_suites[i] = sym_unit_test_suite_create(&common_symm_capas[i]);
1036*2c0519b2SVolodymyr Fialko 
1037*2c0519b2SVolodymyr Fialko 	ret = unit_test_suite_runner(&ts);
1038*2c0519b2SVolodymyr Fialko 
1039*2c0519b2SVolodymyr Fialko 	for (i = 0; i < nb_common_sym_caps; i++)
1040*2c0519b2SVolodymyr Fialko 		rte_free(ts.unit_test_suites[i]);
1041*2c0519b2SVolodymyr Fialko 
1042*2c0519b2SVolodymyr Fialko 	rte_free(test_suites);
1043*2c0519b2SVolodymyr Fialko 
1044*2c0519b2SVolodymyr Fialko exit:
1045*2c0519b2SVolodymyr Fialko 	crosscheck_fini();
1046*2c0519b2SVolodymyr Fialko 
1047*2c0519b2SVolodymyr Fialko 	return ret;
1048*2c0519b2SVolodymyr Fialko }
1049*2c0519b2SVolodymyr Fialko 
1050*2c0519b2SVolodymyr Fialko REGISTER_TEST_COMMAND(cryptodev_crosscheck, test_crosscheck);
1051