/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>
#include <rte_ether.h>
#include <rte_ip.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

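/*
 * Populate modular exponentiation ops: point each op at the base/result
 * buffers from the test options and attach the asymmetric session.
 */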
static void
cperf_set_ops_asym_modex(struct rte_crypto_op **ops,
		   uint32_t src_buf_offset __rte_unused,
		   uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
		   void *sess,
		   const struct cperf_options *options,
		   const struct cperf_test_vector *test_vector __rte_unused,
		   uint16_t iv_offset __rte_unused,
		   uint32_t *imix_idx __rte_unused,
		   uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_asym_op *asym_op = ops[i]->asym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		asym_op->modex.base.data = options->modex_data->base.data;
		asym_op->modex.base.length = options->modex_data->base.len;
		asym_op->modex.result.data = options->modex_data->result.data;
		asym_op->modex.result.length = options->modex_data->result.len;
		rte_crypto_op_attach_asym_session(ops[i], sess);
	}
}

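/*
 * Populate ECDSA (secp256r1) ops with the message, the per-op scalar 'k'
 * and the reference signature components from the test options.
 */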
static void
cperf_set_ops_asym_ecdsa(struct rte_crypto_op **ops,
		   uint32_t src_buf_offset __rte_unused,
		   uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
		   void *sess,
		   const struct cperf_options *options,
		   const struct cperf_test_vector *test_vector __rte_unused,
		   uint16_t iv_offset __rte_unused,
		   uint32_t *imix_idx __rte_unused,
		   uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_asym_op *asym_op = ops[i]->asym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_asym_session(ops[i], sess);

		asym_op->ecdsa.op_type = options->asym_op_type;
		asym_op->ecdsa.message.data = options->secp256r1_data->message.data;
		asym_op->ecdsa.message.length = options->secp256r1_data->message.length;

		asym_op->ecdsa.k.data = options->secp256r1_data->k.data;
		asym_op->ecdsa.k.length = options->secp256r1_data->k.length;

		asym_op->ecdsa.r.data = options->secp256r1_data->sign_r.data;
		asym_op->ecdsa.r.length = options->secp256r1_data->sign_r.length;
		asym_op->ecdsa.s.data = options->secp256r1_data->sign_s.data;
		asym_op->ecdsa.s.length = options->secp256r1_data->sign_s.length;
	}
}

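/*
 * Populate EdDSA (Ed25519) ops with the message, instance and reference
 * signature from the test options.
 */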
static void
cperf_set_ops_asym_eddsa(struct rte_crypto_op **ops,
		   uint32_t src_buf_offset __rte_unused,
		   uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
		   void *sess,
		   const struct cperf_options *options,
		   const struct cperf_test_vector *test_vector __rte_unused,
		   uint16_t iv_offset __rte_unused,
		   uint32_t *imix_idx __rte_unused,
		   uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_asym_op *asym_op = ops[i]->asym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_asym_session(ops[i], sess);

		asym_op->eddsa.op_type = options->asym_op_type;
		asym_op->eddsa.message.data = options->eddsa_data->message.data;
		asym_op->eddsa.message.length = options->eddsa_data->message.length;

		asym_op->eddsa.instance = options->eddsa_data->instance;

		asym_op->eddsa.sign.data = options->eddsa_data->sign.data;
		asym_op->eddsa.sign.length = options->eddsa_data->sign.length;
	}
}

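/*
 * Populate SM2 ops with the hash algorithm, message/cipher buffers, user id,
 * per-op scalar 'k' and reference signature from the test options.
 */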
static void
cperf_set_ops_asym_sm2(struct rte_crypto_op **ops,
		   uint32_t src_buf_offset __rte_unused,
		   uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
		   void *sess,
		   const struct cperf_options *options,
		   const struct cperf_test_vector *test_vector __rte_unused,
		   uint16_t iv_offset __rte_unused,
		   uint32_t *imix_idx __rte_unused,
		   uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_asym_op *asym_op = ops[i]->asym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_asym_session(ops[i], sess);

		/* Populate op with operational details */
		asym_op->sm2.hash = options->asym_hash_alg;

		asym_op->sm2.op_type = options->asym_op_type;
		asym_op->sm2.message.data = options->sm2_data->message.data;
		asym_op->sm2.message.length = options->sm2_data->message.length;
		asym_op->sm2.cipher.data = options->sm2_data->cipher.data;
		asym_op->sm2.cipher.length = options->sm2_data->cipher.length;
		asym_op->sm2.id.data = options->sm2_data->id.data;
		asym_op->sm2.id.length = options->sm2_data->id.length;

		asym_op->sm2.k.data = options->sm2_data->k.data;
		asym_op->sm2.k.length = options->sm2_data->k.length;

		asym_op->sm2.r.data = options->sm2_data->sign_r.data;
		asym_op->sm2.r.length = options->sm2_data->sign_r.length;
		asym_op->sm2.s.data = options->sm2_data->sign_s.data;
		asym_op->sm2.s.length = options->sm2_data->sign_s.length;
	}
}


#ifdef RTE_LIB_SECURITY
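/*
 * For outbound IPsec, seed the packet with the plaintext IPv4 header from
 * the test vector and fix up the total length to match the mbuf.
 */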
static void
test_ipsec_vec_populate(struct rte_mbuf *m, const struct cperf_options *options,
			const struct cperf_test_vector *test_vector)
{
	struct rte_ipv4_hdr *ip = rte_pktmbuf_mtod(m, struct rte_ipv4_hdr *);

	if (options->is_outbound) {
		memcpy(ip, test_vector->plaintext.data, sizeof(struct rte_ipv4_hdr));
		ip->total_length = rte_cpu_to_be_16(m->pkt_len);
	}
}

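/*
 * Populate lookaside-protocol (PDCP/DOCSIS) ops: write the per-packet HFN,
 * attach the security session and size the source mbuf. For DOCSIS, the
 * auth (CRC) and cipher regions start past the DOCSIS/Ethernet header.
 */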
static void
cperf_set_ops_security(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		uint32_t buf_sz;

		uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
					uint32_t *, iv_offset);
		*per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_security_attach_session(ops[i], sess);
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		if (options->op_type == CPERF_PDCP) {
			sym_op->m_src->buf_len = options->segment_sz;
			sym_op->m_src->data_len = options->test_buffer_size;
			sym_op->m_src->pkt_len = sym_op->m_src->data_len;
		}

		if (options->op_type == CPERF_DOCSIS) {
			if (options->imix_distribution_count) {
				buf_sz = options->imix_buffer_sizes[*imix_idx];
				*imix_idx = (*imix_idx + 1) % options->pool_sz;
			} else
				buf_sz = options->test_buffer_size;

			sym_op->m_src->buf_len = options->segment_sz;
			sym_op->m_src->data_len = buf_sz;
			sym_op->m_src->pkt_len = buf_sz;

			/* DOCSIS header is not CRC'ed */
			sym_op->auth.data.offset = options->docsis_hdr_sz;
			sym_op->auth.data.length = buf_sz -
				sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
			/*
			 * DOCSIS header and SRC and DST MAC addresses are not
			 * ciphered
			 */
			sym_op->cipher.data.offset = sym_op->auth.data.offset +
				RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
			sym_op->cipher.data.length = buf_sz -
				sym_op->cipher.data.offset;
		}

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);
	}

	RTE_SET_USED(tsc_start);
	RTE_SET_USED(test_vector);
}

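/*
 * Populate IPsec ops: attach the security session, trim the mbuf chain to
 * the test buffer size (leaving room for the digest) and write the IPv4
 * header. The cycles spent populating headers are accumulated into
 * *tsc_start so they can be accounted for separately.
 */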
static void
cperf_set_ops_security_ipsec(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start)
{
	const uint32_t test_buffer_size = options->test_buffer_size;
	uint64_t tsc_start_temp, tsc_end_temp;
	uint16_t i = 0;

	RTE_SET_USED(imix_idx);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		struct rte_mbuf *m = sym_op->m_src;
		uint32_t offset = test_buffer_size;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_security_attach_session(ops[i], sess);
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] + src_buf_offset);
		sym_op->m_src->pkt_len = test_buffer_size;

		while ((m->next != NULL) && (offset >= m->data_len)) {
			offset -= m->data_len;
			m = m->next;
		}
		m->data_len = offset;
		/*
		 * If there is not enough room in segment,
		 * place the digest in the next segment
		 */
		if (rte_pktmbuf_tailroom(m) < options->digest_sz) {
			m = m->next;
			offset = 0;
		}
		m->next = NULL;

		sym_op->m_dst = NULL;
	}

	if (options->test_file != NULL)
		return;

	tsc_start_temp = rte_rdtsc_precise();

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		struct rte_mbuf *m = sym_op->m_src;

		test_ipsec_vec_populate(m, options, test_vector);
	}

	tsc_end_temp = rte_rdtsc_precise();
	*tsc_start += tsc_end_temp - tsc_start_temp;
}

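/*
 * Populate TLS record ops: mark the content type as application data (0x17),
 * attach the security session and trim the mbuf chain to the test buffer
 * size, leaving room for the digest.
 */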
static void
cperf_set_ops_security_tls(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start)
{
	const uint32_t test_buffer_size = options->test_buffer_size;
	uint16_t i = 0;

	RTE_SET_USED(imix_idx);
	RTE_SET_USED(tsc_start);
	RTE_SET_USED(test_vector);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		struct rte_mbuf *m = sym_op->m_src;
		uint32_t offset = test_buffer_size;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		ops[i]->param1.tls_record.content_type = 0x17;
		rte_security_attach_session(ops[i], sess);
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] + src_buf_offset);
		sym_op->m_src->pkt_len = test_buffer_size;

		while ((m->next != NULL) && (offset >= m->data_len)) {
			offset -= m->data_len;
			m = m->next;
		}
		m->data_len = offset;
		/*
		 * If there is not enough room in segment,
		 * place the digest in the next segment
		 */
		if ((rte_pktmbuf_tailroom(m)) < options->digest_sz) {
			m = m->next;
			m->data_len = 0;
		}
		m->next = NULL;

		sym_op->m_dst = NULL;
	}
}
#endif

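/*
 * Populate NULL-cipher ops: attach the session, point the source (and
 * optional destination) mbuf at its offset within the op and set the
 * cipher region length from the IMIX distribution or the test buffer size.
 */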
static void
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}
}

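/*
 * Populate NULL-auth ops: same layout as the NULL-cipher case, but the
 * length/offset are programmed into the auth region instead.
 */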
static void
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* auth parameters */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}
}

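/*
 * Populate cipher-only ops. For SNOW3G/KASUMI/ZUC the cipher length is
 * expressed in bits; for verify tests the cipher IV from the test vector
 * is copied into each op.
 */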
static void
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}
}

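/*
 * Populate auth-only ops. For verify the digest comes from the test vector;
 * for generate it is written at the end of the data, moving to the next
 * segment if the current one cannot hold it.
 */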
static void
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {

			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);

		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
}

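/*
 * Populate combined cipher+auth ops: both regions cover the whole test
 * buffer, digest placement follows the auth-only logic and wireless
 * algorithms get bit-length conversion. For AUTH_THEN_CIPHER with digest
 * generation, the cipher region is extended to also cover the digest.
 */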
static void
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		if ((options->auth_op == RTE_CRYPTO_AUTH_OP_GENERATE) &&
				(options->op_type == CPERF_AUTH_THEN_CIPHER))
			sym_op->cipher.data.length += options->digest_sz;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {

			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy IV after the crypto operation and
				 * the cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}

	}
}

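/*
 * Populate AEAD ops: the AAD is placed right after the (16-byte aligned) IV
 * in the op private data, the digest follows the data (or comes from the
 * test vector for decrypt), and IV/AAD contents are copied for verify and
 * latency runs and for decrypt throughput runs. AES-CCM expects the nonce
 * one byte into the IV field and the AAD 18 bytes into the AAD field.
 */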
static void
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		if (options->imix_distribution_count) {
			sym_op->aead.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {

			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if ((options->test == CPERF_TEST_TYPE_VERIFY) ||
	    (options->test == CPERF_TEST_TYPE_LATENCY) ||
	    (options->test == CPERF_TEST_TYPE_THROUGHPUT &&
	     (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT ||
	      options->cipher_op == RTE_CRYPTO_CIPHER_OP_DECRYPT))) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * If doing AES-CCM, nonce is copied one byte
			 * after the start of IV field, and AAD is copied
			 * 18 bytes after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}
}

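/*
 * Build an IPsec (ESP, IPv4 tunnel) lookaside-protocol security session from
 * either an AEAD transform or a cipher+auth chain, with the direction and
 * transform order chosen by options->is_outbound.
 */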
static void *
create_ipsec_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform auth_xform = {0};
	struct rte_crypto_sym_xform *crypto_xform;
	struct rte_crypto_sym_xform xform = {0};

	if (options->aead_algo != 0) {
		/* Setup AEAD Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		xform.next = NULL;
		xform.aead.algo = options->aead_algo;
		xform.aead.op = options->aead_op;
		xform.aead.iv.offset = iv_offset;
		xform.aead.key.data = test_vector->aead_key.data;
		xform.aead.key.length = test_vector->aead_key.length;
		xform.aead.iv.length = test_vector->aead_iv.length;
		xform.aead.digest_length = options->digest_sz;
		xform.aead.aad_length = options->aead_aad_sz;
		crypto_xform = &xform;
	} else if (options->cipher_algo != 0 && options->auth_algo != 0) {
		/* Setup Cipher Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		xform.cipher.algo = options->cipher_algo;
		xform.cipher.op = options->cipher_op;
		xform.cipher.iv.offset = iv_offset;
		xform.cipher.iv.length = test_vector->cipher_iv.length;
		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			xform.cipher.key.data = test_vector->cipher_key.data;
			xform.cipher.key.length =
				test_vector->cipher_key.length;
		} else {
			xform.cipher.key.data = NULL;
			xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset +
				xform.cipher.iv.length;
		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.key.length =
						test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		if (options->is_outbound) {
			crypto_xform = &xform;
			xform.next = &auth_xform;
			auth_xform.next = NULL;
		} else {
			crypto_xform = &auth_xform;
			auth_xform.next = &xform;
			xform.next = NULL;
		}
	} else {
		return NULL;
	}

#define CPERF_IPSEC_SRC_IP	0x01010101
#define CPERF_IPSEC_DST_IP	0x02020202
#define CPERF_IPSEC_SALT	0x0
#define CPERF_IPSEC_DEFTTL	64
	struct rte_security_ipsec_tunnel_param tunnel = {
		.type = RTE_SECURITY_IPSEC_TUNNEL_IPV4,
		{.ipv4 = {
			.src_ip = { .s_addr = CPERF_IPSEC_SRC_IP},
			.dst_ip = { .s_addr = CPERF_IPSEC_DST_IP},
			.dscp = 0,
			.df = 0,
			.ttl = CPERF_IPSEC_DEFTTL,
		} },
	};
	struct rte_security_session_conf sess_conf = {
		.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
		.protocol = RTE_SECURITY_PROTOCOL_IPSEC,
		{.ipsec = {
			.spi = rte_lcore_id() + 1,
			/**< For testing's sake, lcore_id is taken as SPI so that
			 * for every core a different session is created.
			 */
			.salt = CPERF_IPSEC_SALT,
			.options = { 0 },
			.replay_win_sz = 0,
			.proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
			.mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
			.tunnel = tunnel,
		} },
		.userdata = NULL,
		.crypto_xform = crypto_xform,
	};

	if (options->is_outbound)
		sess_conf.ipsec.direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS;
	else
		sess_conf.ipsec.direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS;

	void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

	/* Create security session */
	return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
}

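/*
 * Build a TLS record lookaside-protocol security session. Outbound (write)
 * sessions currently chain auth before cipher; inbound (read) sessions use
 * the reverse order. The TLS version defaults to 1.2 unless overridden.
 */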
static void *
create_tls_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform auth_xform = {0};
	struct rte_crypto_sym_xform *crypto_xform;
	struct rte_crypto_sym_xform xform = {0};

	if (options->aead_algo != 0) {
		/* Setup AEAD Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		xform.next = NULL;
		xform.aead.algo = options->aead_algo;
		xform.aead.op = options->aead_op;
		xform.aead.iv.offset = iv_offset;
		xform.aead.key.data = test_vector->aead_key.data;
		xform.aead.key.length = test_vector->aead_key.length;
		xform.aead.iv.length = test_vector->aead_iv.length;
		xform.aead.digest_length = options->digest_sz;
		xform.aead.aad_length = options->aead_aad_sz;
		crypto_xform = &xform;
	} else if (options->cipher_algo != 0 && options->auth_algo != 0) {
		/* Setup Cipher Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		xform.cipher.algo = options->cipher_algo;
		xform.cipher.op = options->cipher_op;
		xform.cipher.iv.offset = iv_offset;
		xform.cipher.iv.length = test_vector->cipher_iv.length;
		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			xform.cipher.key.data = test_vector->cipher_key.data;
			xform.cipher.key.length = test_vector->cipher_key.length;
		} else {
			xform.cipher.key.data = NULL;
			xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset + xform.cipher.iv.length;
		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.key.length = test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		if (options->is_outbound) {
			/* Currently supporting AUTH then Encrypt mode only for TLS. */
			crypto_xform = &auth_xform;
			auth_xform.next = &xform;
			xform.next = NULL;
		} else {
			crypto_xform = &xform;
			xform.next = &auth_xform;
			auth_xform.next = NULL;
		}
	} else {
		return NULL;
	}

	struct rte_security_tls_record_sess_options opts = {
		.iv_gen_disable = 0,
		.extra_padding_enable = 0,
	};
	struct rte_security_session_conf sess_conf = {
		.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
		.protocol = RTE_SECURITY_PROTOCOL_TLS_RECORD,
		{.tls_record = {
			.ver = RTE_SECURITY_VERSION_TLS_1_2,
			.options = opts,
		} },
		.userdata = NULL,
		.crypto_xform = crypto_xform,
	};
	if (options->tls_version)
		sess_conf.tls_record.ver = options->tls_version;

	if (options->is_outbound)
		sess_conf.tls_record.type = RTE_SECURITY_TLS_SESS_TYPE_WRITE;
	else
		sess_conf.tls_record.type = RTE_SECURITY_TLS_SESS_TYPE_READ;

	void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

	/* Create security session */
	return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
}

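/*
 * Create the session matching the configured op type: an asymmetric session
 * for MODEX/ECDSA/EdDSA/SM2, a security session for PDCP, IPsec, TLS and
 * DOCSIS (when RTE_LIB_SECURITY is enabled), or a plain symmetric crypto
 * session otherwise.
 */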
static void *
cperf_create_session(struct rte_mempool *sess_mp,
	uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector,
	uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	void *sess = NULL;
	void *asym_sess = NULL;
	struct rte_crypto_asym_xform xform = {0};
	int ret;

	if (options->op_type == CPERF_ASYM_MODEX) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
		xform.modex.modulus.data = options->modex_data->modulus.data;
		xform.modex.modulus.length = options->modex_data->modulus.len;
		xform.modex.exponent.data = options->modex_data->exponent.data;
		xform.modex.exponent.length = options->modex_data->exponent.len;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "Asym session create failed\n");
			return NULL;
		}
		return asym_sess;
	}

	if (options->op_type == CPERF_ASYM_SECP256R1) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_ECDSA;
		xform.ec.curve_id = options->secp256r1_data->curve;
		xform.ec.pkey.data = options->secp256r1_data->pkey.data;
		xform.ec.pkey.length = options->secp256r1_data->pkey.length;
		xform.ec.q.x.data = options->secp256r1_data->pubkey_qx.data;
		xform.ec.q.x.length = options->secp256r1_data->pubkey_qx.length;
		xform.ec.q.y.data = options->secp256r1_data->pubkey_qy.data;
		xform.ec.q.y.length = options->secp256r1_data->pubkey_qy.length;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "ECDSA Asym session create failed\n");
			return NULL;
		}

		return asym_sess;
	}

	if (options->op_type == CPERF_ASYM_ED25519) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_EDDSA;
		xform.ec.curve_id = options->eddsa_data->curve;
		xform.ec.pkey.data = options->eddsa_data->pkey.data;
		xform.ec.pkey.length = options->eddsa_data->pkey.length;
		xform.ec.q.x.data = options->eddsa_data->pubkey.data;
		xform.ec.q.x.length = options->eddsa_data->pubkey.length;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "EdDSA Asym session create failed\n");
			return NULL;
		}

		return asym_sess;
	}

	if (options->op_type == CPERF_ASYM_SM2) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2;
		xform.ec.curve_id = options->sm2_data->curve;
		xform.ec.pkey.data = options->sm2_data->pkey.data;
		xform.ec.pkey.length = options->sm2_data->pkey.length;
		xform.ec.q.x.data = options->sm2_data->pubkey_qx.data;
		xform.ec.q.x.length = options->sm2_data->pubkey_qx.length;
		xform.ec.q.y.data = options->sm2_data->pubkey_qy.data;
		xform.ec.q.y.length = options->sm2_data->pubkey_qy.length;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "SM2 Asym session create failed\n");
			return NULL;
		}

		return asym_sess;
	}
#ifdef RTE_LIB_SECURITY
	/*
	 * security only
	 */
	if (options->op_type == CPERF_PDCP) {
		/* Setup Cipher Parameters */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		cipher_xform.cipher.iv.length = 4;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data = test_vector->cipher_key.data;
			cipher_xform.cipher.key.length = test_vector->cipher_key.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		if (options->auth_algo != 0) {
			auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
			auth_xform.next = NULL;
			auth_xform.auth.algo = options->auth_algo;
			auth_xform.auth.op = options->auth_op;
			auth_xform.auth.iv.offset = iv_offset +
				cipher_xform.cipher.iv.length;

			/* auth different than null */
			if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
				auth_xform.auth.digest_length = options->digest_sz;
				auth_xform.auth.key.length = test_vector->auth_key.length;
				auth_xform.auth.key.data = test_vector->auth_key.data;
				auth_xform.auth.iv.length = test_vector->auth_iv.length;
			} else {
				auth_xform.auth.digest_length = 0;
				auth_xform.auth.key.length = 0;
				auth_xform.auth.key.data = NULL;
				auth_xform.auth.iv.length = 0;
			}

			cipher_xform.next = &auth_xform;
		} else {
			cipher_xform.next = NULL;
		}

		struct rte_security_session_conf sess_conf = {
			.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_PDCP,
			{.pdcp = {
				.bearer = 0x16,
				.domain = options->pdcp_domain,
				.pkt_dir = 0,
				.sn_size = options->pdcp_sn_sz,
				.hfn = options->pdcp_ses_hfn_en ?
					PDCP_DEFAULT_HFN : 0,
				.hfn_threshold = 0x70C0A,
				.sdap_enabled = options->pdcp_sdap,
				.hfn_ovrd = !(options->pdcp_ses_hfn_en),
			} },
			.crypto_xform = &cipher_xform
		};

		void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
	}

	if (options->op_type == CPERF_IPSEC) {
		return create_ipsec_session(sess_mp, dev_id,
				options, test_vector, iv_offset);
	}

	if (options->op_type == CPERF_TLS) {
		return create_tls_session(sess_mp, dev_id,
				options, test_vector, iv_offset);
	}

	if (options->op_type == CPERF_DOCSIS) {
		enum rte_security_docsis_direction direction;

		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
				test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
				test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
				test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		cipher_xform.next = NULL;

		if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
			direction = RTE_SECURITY_DOCSIS_DOWNLINK;
		else
			direction = RTE_SECURITY_DOCSIS_UPLINK;

		struct rte_security_session_conf sess_conf = {
			.action_type =
				RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
			{.docsis = {
				.direction = direction,
			} },
			.crypto_xform = &cipher_xform
		};
		void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
	}
#endif
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform,
				sess_mp);
	/*
	 *  auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset +
			cipher_xform.cipher.iv.length;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
					test_vector->aead_key.data;
		aead_xform.aead.key.length =
					test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
					options->aead_aad_sz;

		/* Create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &aead_xform,
				sess_mp);
	}

	return sess;
}

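/*
 * Select the session-create and op-populate callbacks for the configured
 * operation type; returns -1 for unknown op types.
 */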
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	switch (options->op_type) {
	case CPERF_AEAD:
		op_fns->populate_ops = cperf_set_ops_aead;
		break;

	case CPERF_AUTH_THEN_CIPHER:
	case CPERF_CIPHER_THEN_AUTH:
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		break;
	case CPERF_AUTH_ONLY:
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		break;
	case CPERF_CIPHER_ONLY:
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		break;
	case CPERF_ASYM_MODEX:
		op_fns->populate_ops = cperf_set_ops_asym_modex;
		break;
	case CPERF_ASYM_SECP256R1:
		op_fns->populate_ops = cperf_set_ops_asym_ecdsa;
		break;
	case CPERF_ASYM_ED25519:
		op_fns->populate_ops = cperf_set_ops_asym_eddsa;
		break;
	case CPERF_ASYM_SM2:
		op_fns->populate_ops = cperf_set_ops_asym_sm2;
		break;
#ifdef RTE_LIB_SECURITY
	case CPERF_PDCP:
	case CPERF_DOCSIS:
		op_fns->populate_ops = cperf_set_ops_security;
		break;
	case CPERF_IPSEC:
		op_fns->populate_ops = cperf_set_ops_security_ipsec;
		break;
	case CPERF_TLS:
		op_fns->populate_ops = cperf_set_ops_security_tls;
		break;
#endif
	default:
		return -1;
	}

	return 0;
}