xref: /dpdk/app/test-crypto-perf/cperf_ops.c (revision c9902a15bd005b6d4fe072cf7b60fe4ee679155f)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2016-2017 Intel Corporation
3  */
4 
5 #include <rte_cryptodev.h>
6 #include <rte_ether.h>
7 
8 #include "cperf_ops.h"
9 #include "cperf_test_vectors.h"
10 
11 static int
12 cperf_set_ops_asym(struct rte_crypto_op **ops,
13 		   uint32_t src_buf_offset __rte_unused,
14 		   uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
15 		   struct rte_cryptodev_sym_session *sess,
16 		   const struct cperf_options *options __rte_unused,
17 		   const struct cperf_test_vector *test_vector __rte_unused,
18 		   uint16_t iv_offset __rte_unused,
19 		   uint32_t *imix_idx __rte_unused)
20 {
21 	uint16_t i;
22 	uint8_t result[sizeof(perf_mod_p)] = { 0 };
23 	struct rte_cryptodev_asym_session *asym_sess = (void *)sess;
24 
25 	for (i = 0; i < nb_ops; i++) {
26 		struct rte_crypto_asym_op *asym_op = ops[i]->asym;
27 
28 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
29 		asym_op->modex.base.data = perf_base;
30 		asym_op->modex.base.length = sizeof(perf_base);
31 		asym_op->modex.result.data = result;
32 		asym_op->modex.result.length = sizeof(result);
33 		rte_crypto_op_attach_asym_session(ops[i], asym_sess);
34 	}
35 	return 0;
36 }
37 
38 #ifdef RTE_LIB_SECURITY
39 static int
40 cperf_set_ops_security(struct rte_crypto_op **ops,
41 		uint32_t src_buf_offset __rte_unused,
42 		uint32_t dst_buf_offset __rte_unused,
43 		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
44 		const struct cperf_options *options __rte_unused,
45 		const struct cperf_test_vector *test_vector __rte_unused,
46 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
47 {
48 	uint16_t i;
49 
50 	for (i = 0; i < nb_ops; i++) {
51 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
52 		struct rte_security_session *sec_sess =
53 			(struct rte_security_session *)sess;
54 		uint32_t buf_sz;
55 
56 		uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
57 					uint32_t *, iv_offset);
58 		*per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
59 
60 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
61 		rte_security_attach_session(ops[i], sec_sess);
62 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
63 							src_buf_offset);
64 
65 		if (options->op_type == CPERF_PDCP) {
66 			sym_op->m_src->buf_len = options->segment_sz;
67 			sym_op->m_src->data_len = options->test_buffer_size;
68 			sym_op->m_src->pkt_len = sym_op->m_src->data_len;
69 		}
70 
71 		if (options->op_type == CPERF_DOCSIS) {
72 			if (options->imix_distribution_count) {
73 				buf_sz = options->imix_buffer_sizes[*imix_idx];
74 				*imix_idx = (*imix_idx + 1) % options->pool_sz;
75 			} else
76 				buf_sz = options->test_buffer_size;
77 
78 			sym_op->m_src->buf_len = options->segment_sz;
79 			sym_op->m_src->data_len = buf_sz;
80 			sym_op->m_src->pkt_len = buf_sz;
81 
82 			/* DOCSIS header is not CRC'ed */
83 			sym_op->auth.data.offset = options->docsis_hdr_sz;
84 			sym_op->auth.data.length = buf_sz -
85 				sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
86 			/*
87 			 * DOCSIS header and SRC and DST MAC addresses are not
88 			 * ciphered
89 			 */
90 			sym_op->cipher.data.offset = sym_op->auth.data.offset +
91 				RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
92 			sym_op->cipher.data.length = buf_sz -
93 				sym_op->cipher.data.offset;
94 		}
95 
96 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
97 		if (dst_buf_offset == 0)
98 			sym_op->m_dst = NULL;
99 		else
100 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
101 							dst_buf_offset);
102 	}
103 
104 	return 0;
105 }
106 #endif
107 
108 static int
109 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
110 		uint32_t src_buf_offset, uint32_t dst_buf_offset,
111 		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
112 		const struct cperf_options *options,
113 		const struct cperf_test_vector *test_vector __rte_unused,
114 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
115 {
116 	uint16_t i;
117 
118 	for (i = 0; i < nb_ops; i++) {
119 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
120 
121 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
122 		rte_crypto_op_attach_sym_session(ops[i], sess);
123 
124 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
125 							src_buf_offset);
126 
127 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
128 		if (dst_buf_offset == 0)
129 			sym_op->m_dst = NULL;
130 		else
131 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
132 							dst_buf_offset);
133 
134 		/* cipher parameters */
135 		if (options->imix_distribution_count) {
136 			sym_op->cipher.data.length =
137 				options->imix_buffer_sizes[*imix_idx];
138 			*imix_idx = (*imix_idx + 1) % options->pool_sz;
139 		} else
140 			sym_op->cipher.data.length = options->test_buffer_size;
141 		sym_op->cipher.data.offset = 0;
142 	}
143 
144 	return 0;
145 }
146 
147 static int
148 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
149 		uint32_t src_buf_offset, uint32_t dst_buf_offset,
150 		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
151 		const struct cperf_options *options,
152 		const struct cperf_test_vector *test_vector __rte_unused,
153 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
154 {
155 	uint16_t i;
156 
157 	for (i = 0; i < nb_ops; i++) {
158 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
159 
160 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
161 		rte_crypto_op_attach_sym_session(ops[i], sess);
162 
163 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
164 							src_buf_offset);
165 
166 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
167 		if (dst_buf_offset == 0)
168 			sym_op->m_dst = NULL;
169 		else
170 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
171 							dst_buf_offset);
172 
173 		/* auth parameters */
174 		if (options->imix_distribution_count) {
175 			sym_op->auth.data.length =
176 				options->imix_buffer_sizes[*imix_idx];
177 			*imix_idx = (*imix_idx + 1) % options->pool_sz;
178 		} else
179 			sym_op->auth.data.length = options->test_buffer_size;
180 		sym_op->auth.data.offset = 0;
181 	}
182 
183 	return 0;
184 }
185 
186 static int
187 cperf_set_ops_cipher(struct rte_crypto_op **ops,
188 		uint32_t src_buf_offset, uint32_t dst_buf_offset,
189 		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
190 		const struct cperf_options *options,
191 		const struct cperf_test_vector *test_vector,
192 		uint16_t iv_offset, uint32_t *imix_idx)
193 {
194 	uint16_t i;
195 
196 	for (i = 0; i < nb_ops; i++) {
197 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
198 
199 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
200 		rte_crypto_op_attach_sym_session(ops[i], sess);
201 
202 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
203 							src_buf_offset);
204 
205 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
206 		if (dst_buf_offset == 0)
207 			sym_op->m_dst = NULL;
208 		else
209 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
210 							dst_buf_offset);
211 
212 		/* cipher parameters */
213 		if (options->imix_distribution_count) {
214 			sym_op->cipher.data.length =
215 				options->imix_buffer_sizes[*imix_idx];
216 			*imix_idx = (*imix_idx + 1) % options->pool_sz;
217 		} else
218 			sym_op->cipher.data.length = options->test_buffer_size;
219 
220 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
221 				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
222 				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
223 			sym_op->cipher.data.length <<= 3;
224 
225 		sym_op->cipher.data.offset = 0;
226 	}
227 
228 	if (options->test == CPERF_TEST_TYPE_VERIFY) {
229 		for (i = 0; i < nb_ops; i++) {
230 			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
231 					uint8_t *, iv_offset);
232 
233 			memcpy(iv_ptr, test_vector->cipher_iv.data,
234 					test_vector->cipher_iv.length);
235 
236 		}
237 	}
238 
239 	return 0;
240 }
241 
/*
 * Populate symmetric ops for an auth-only test.
 *
 * Attaches the session, wires up src/dst mbufs (stored at fixed offsets
 * from each op), copies the auth IV when the test vector supplies one,
 * and sets up the digest: for VERIFY ops it points at the reference
 * digest, for generate ops it is placed just past the payload, walking
 * the mbuf segment chain if needed. SNOW3G/KASUMI/ZUC lengths are
 * converted to bits. Returns 0 always.
 */
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* Source mbuf sits at a fixed offset from the op. */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* Copy the auth IV into the op's IV area, if there is one. */
		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			/* Verify against the precomputed reference digest. */
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/*
			 * Generate: place the digest right after the payload.
			 * Walk the segment chain to find the segment/offset
			 * where test_buffer_size bytes end.
			 */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);

		}

		/* Per-packet size comes from the IMIX table, if enabled. */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		/* Wireless algorithms express the length in bits. */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		/*
		 * NOTE(review): this re-copies the same auth IV already
		 * written in the loop above — appears redundant; confirm
		 * before removing.
		 */
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
	return 0;
}
341 
/*
 * Populate symmetric ops for a chained cipher+auth test.
 *
 * Combines the per-op setup of the cipher-only and auth-only paths:
 * session attach, src/dst mbuf wiring, cipher and auth regions (bit
 * lengths for SNOW3G/KASUMI/ZUC), and digest placement (reference digest
 * for VERIFY, end-of-payload otherwise). In verify mode the cipher IV —
 * and the auth IV, when present — are copied into each op's IV area.
 * Returns 0 always.
 */
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* Source mbuf sits at a fixed offset from the op. */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		/* Wireless cipher algorithms express the length in bits. */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			/* Verify against the precomputed reference digest. */
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/*
			 * Generate: place the digest right after the payload.
			 * Walk the segment chain to find the segment/offset
			 * where test_buffer_size bytes end.
			 */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		/* Per-packet size comes from the IMIX table, if enabled. */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		/* Wireless auth algorithms express the length in bits. */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy IV after the crypto operation and
				 * the cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}

	}

	return 0;
}
456 
/*
 * Populate symmetric ops for an AEAD test.
 *
 * The AAD is located in each op's private area right after the IV
 * (16-byte aligned). The digest points at the reference digest for
 * DECRYPT ops, otherwise it is placed just past the AEAD payload,
 * walking the mbuf segment chain if needed. In verify mode the IV and
 * AAD are copied in, with AES-CCM-specific offsets. Returns 0 always.
 */
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* Source mbuf sits at a fixed offset from the op. */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		if (options->imix_distribution_count) {
			sym_op->aead.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		/* AAD lives in the op's private area at aad_offset. */
		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			/* Decrypt verifies against the reference digest. */
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/*
			 * Encrypt: place the digest right after the AEAD
			 * payload, walking the segment chain to find where
			 * that offset lands.
			 */
			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * If doing AES-CCM, nonce is copied one byte
			 * after the start of IV field, and AAD is copied
			 * 18 bytes after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}

	return 0;
}
567 
568 static struct rte_cryptodev_sym_session *
569 cperf_create_session(struct rte_mempool *sess_mp,
570 	struct rte_mempool *priv_mp,
571 	uint8_t dev_id,
572 	const struct cperf_options *options,
573 	const struct cperf_test_vector *test_vector,
574 	uint16_t iv_offset)
575 {
576 	struct rte_crypto_sym_xform cipher_xform;
577 	struct rte_crypto_sym_xform auth_xform;
578 	struct rte_crypto_sym_xform aead_xform;
579 	struct rte_cryptodev_sym_session *sess = NULL;
580 	struct rte_crypto_asym_xform xform = {0};
581 	int rc;
582 
583 	if (options->op_type == CPERF_ASYM_MODEX) {
584 		xform.next = NULL;
585 		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
586 		xform.modex.modulus.data = perf_mod_p;
587 		xform.modex.modulus.length = sizeof(perf_mod_p);
588 		xform.modex.exponent.data = perf_mod_e;
589 		xform.modex.exponent.length = sizeof(perf_mod_e);
590 
591 		sess = (void *)rte_cryptodev_asym_session_create(sess_mp);
592 		if (sess == NULL)
593 			return NULL;
594 		rc = rte_cryptodev_asym_session_init(dev_id, (void *)sess,
595 						     &xform, priv_mp);
596 		if (rc < 0) {
597 			if (sess != NULL) {
598 				rte_cryptodev_asym_session_clear(dev_id,
599 								 (void *)sess);
600 				rte_cryptodev_asym_session_free((void *)sess);
601 			}
602 			return NULL;
603 		}
604 		return sess;
605 	}
606 #ifdef RTE_LIB_SECURITY
607 	/*
608 	 * security only
609 	 */
610 	if (options->op_type == CPERF_PDCP) {
611 		/* Setup Cipher Parameters */
612 		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
613 		cipher_xform.next = NULL;
614 		cipher_xform.cipher.algo = options->cipher_algo;
615 		cipher_xform.cipher.op = options->cipher_op;
616 		cipher_xform.cipher.iv.offset = iv_offset;
617 		cipher_xform.cipher.iv.length = 4;
618 
619 		/* cipher different than null */
620 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
621 			cipher_xform.cipher.key.data = test_vector->cipher_key.data;
622 			cipher_xform.cipher.key.length = test_vector->cipher_key.length;
623 		} else {
624 			cipher_xform.cipher.key.data = NULL;
625 			cipher_xform.cipher.key.length = 0;
626 		}
627 
628 		/* Setup Auth Parameters */
629 		if (options->auth_algo != 0) {
630 			auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
631 			auth_xform.next = NULL;
632 			auth_xform.auth.algo = options->auth_algo;
633 			auth_xform.auth.op = options->auth_op;
634 			auth_xform.auth.iv.offset = iv_offset +
635 				cipher_xform.cipher.iv.length;
636 
637 			/* auth different than null */
638 			if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
639 				auth_xform.auth.digest_length = options->digest_sz;
640 				auth_xform.auth.key.length = test_vector->auth_key.length;
641 				auth_xform.auth.key.data = test_vector->auth_key.data;
642 				auth_xform.auth.iv.length = test_vector->auth_iv.length;
643 			} else {
644 				auth_xform.auth.digest_length = 0;
645 				auth_xform.auth.key.length = 0;
646 				auth_xform.auth.key.data = NULL;
647 				auth_xform.auth.iv.length = 0;
648 			}
649 
650 			cipher_xform.next = &auth_xform;
651 		} else {
652 			cipher_xform.next = NULL;
653 		}
654 
655 		struct rte_security_session_conf sess_conf = {
656 			.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
657 			.protocol = RTE_SECURITY_PROTOCOL_PDCP,
658 			{.pdcp = {
659 				.bearer = 0x16,
660 				.domain = options->pdcp_domain,
661 				.pkt_dir = 0,
662 				.sn_size = options->pdcp_sn_sz,
663 				.hfn = options->pdcp_ses_hfn_en ?
664 					PDCP_DEFAULT_HFN : 0,
665 				.hfn_threshold = 0x70C0A,
666 				.hfn_ovrd = !(options->pdcp_ses_hfn_en),
667 			} },
668 			.crypto_xform = &cipher_xform
669 		};
670 
671 		struct rte_security_ctx *ctx = (struct rte_security_ctx *)
672 					rte_cryptodev_get_sec_ctx(dev_id);
673 
674 		/* Create security session */
675 		return (void *)rte_security_session_create(ctx,
676 					&sess_conf, sess_mp, priv_mp);
677 	}
678 	if (options->op_type == CPERF_DOCSIS) {
679 		enum rte_security_docsis_direction direction;
680 
681 		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
682 		cipher_xform.next = NULL;
683 		cipher_xform.cipher.algo = options->cipher_algo;
684 		cipher_xform.cipher.op = options->cipher_op;
685 		cipher_xform.cipher.iv.offset = iv_offset;
686 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
687 			cipher_xform.cipher.key.data =
688 				test_vector->cipher_key.data;
689 			cipher_xform.cipher.key.length =
690 				test_vector->cipher_key.length;
691 			cipher_xform.cipher.iv.length =
692 				test_vector->cipher_iv.length;
693 		} else {
694 			cipher_xform.cipher.key.data = NULL;
695 			cipher_xform.cipher.key.length = 0;
696 			cipher_xform.cipher.iv.length = 0;
697 		}
698 		cipher_xform.next = NULL;
699 
700 		if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
701 			direction = RTE_SECURITY_DOCSIS_DOWNLINK;
702 		else
703 			direction = RTE_SECURITY_DOCSIS_UPLINK;
704 
705 		struct rte_security_session_conf sess_conf = {
706 			.action_type =
707 				RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
708 			.protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
709 			{.docsis = {
710 				.direction = direction,
711 			} },
712 			.crypto_xform = &cipher_xform
713 		};
714 		struct rte_security_ctx *ctx = (struct rte_security_ctx *)
715 					rte_cryptodev_get_sec_ctx(dev_id);
716 
717 		/* Create security session */
718 		return (void *)rte_security_session_create(ctx,
719 					&sess_conf, sess_mp, priv_mp);
720 	}
721 #endif
722 	sess = rte_cryptodev_sym_session_create(sess_mp);
723 	/*
724 	 * cipher only
725 	 */
726 	if (options->op_type == CPERF_CIPHER_ONLY) {
727 		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
728 		cipher_xform.next = NULL;
729 		cipher_xform.cipher.algo = options->cipher_algo;
730 		cipher_xform.cipher.op = options->cipher_op;
731 		cipher_xform.cipher.iv.offset = iv_offset;
732 
733 		/* cipher different than null */
734 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
735 			cipher_xform.cipher.key.data =
736 					test_vector->cipher_key.data;
737 			cipher_xform.cipher.key.length =
738 					test_vector->cipher_key.length;
739 			cipher_xform.cipher.iv.length =
740 					test_vector->cipher_iv.length;
741 		} else {
742 			cipher_xform.cipher.key.data = NULL;
743 			cipher_xform.cipher.key.length = 0;
744 			cipher_xform.cipher.iv.length = 0;
745 		}
746 		/* create crypto session */
747 		rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
748 				priv_mp);
749 	/*
750 	 *  auth only
751 	 */
752 	} else if (options->op_type == CPERF_AUTH_ONLY) {
753 		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
754 		auth_xform.next = NULL;
755 		auth_xform.auth.algo = options->auth_algo;
756 		auth_xform.auth.op = options->auth_op;
757 		auth_xform.auth.iv.offset = iv_offset;
758 
759 		/* auth different than null */
760 		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
761 			auth_xform.auth.digest_length =
762 					options->digest_sz;
763 			auth_xform.auth.key.length =
764 					test_vector->auth_key.length;
765 			auth_xform.auth.key.data = test_vector->auth_key.data;
766 			auth_xform.auth.iv.length =
767 					test_vector->auth_iv.length;
768 		} else {
769 			auth_xform.auth.digest_length = 0;
770 			auth_xform.auth.key.length = 0;
771 			auth_xform.auth.key.data = NULL;
772 			auth_xform.auth.iv.length = 0;
773 		}
774 		/* create crypto session */
775 		rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
776 				priv_mp);
777 	/*
778 	 * cipher and auth
779 	 */
780 	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
781 			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
782 		/*
783 		 * cipher
784 		 */
785 		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
786 		cipher_xform.next = NULL;
787 		cipher_xform.cipher.algo = options->cipher_algo;
788 		cipher_xform.cipher.op = options->cipher_op;
789 		cipher_xform.cipher.iv.offset = iv_offset;
790 
791 		/* cipher different than null */
792 		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
793 			cipher_xform.cipher.key.data =
794 					test_vector->cipher_key.data;
795 			cipher_xform.cipher.key.length =
796 					test_vector->cipher_key.length;
797 			cipher_xform.cipher.iv.length =
798 					test_vector->cipher_iv.length;
799 		} else {
800 			cipher_xform.cipher.key.data = NULL;
801 			cipher_xform.cipher.key.length = 0;
802 			cipher_xform.cipher.iv.length = 0;
803 		}
804 
805 		/*
806 		 * auth
807 		 */
808 		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
809 		auth_xform.next = NULL;
810 		auth_xform.auth.algo = options->auth_algo;
811 		auth_xform.auth.op = options->auth_op;
812 		auth_xform.auth.iv.offset = iv_offset +
813 			cipher_xform.cipher.iv.length;
814 
815 		/* auth different than null */
816 		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
817 			auth_xform.auth.digest_length = options->digest_sz;
818 			auth_xform.auth.iv.length = test_vector->auth_iv.length;
819 			auth_xform.auth.key.length =
820 					test_vector->auth_key.length;
821 			auth_xform.auth.key.data =
822 					test_vector->auth_key.data;
823 		} else {
824 			auth_xform.auth.digest_length = 0;
825 			auth_xform.auth.key.length = 0;
826 			auth_xform.auth.key.data = NULL;
827 			auth_xform.auth.iv.length = 0;
828 		}
829 
830 		/* cipher then auth */
831 		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
832 			cipher_xform.next = &auth_xform;
833 			/* create crypto session */
834 			rte_cryptodev_sym_session_init(dev_id,
835 					sess, &cipher_xform, priv_mp);
836 		} else { /* auth then cipher */
837 			auth_xform.next = &cipher_xform;
838 			/* create crypto session */
839 			rte_cryptodev_sym_session_init(dev_id,
840 					sess, &auth_xform, priv_mp);
841 		}
842 	} else { /* options->op_type == CPERF_AEAD */
843 		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
844 		aead_xform.next = NULL;
845 		aead_xform.aead.algo = options->aead_algo;
846 		aead_xform.aead.op = options->aead_op;
847 		aead_xform.aead.iv.offset = iv_offset;
848 
849 		aead_xform.aead.key.data =
850 					test_vector->aead_key.data;
851 		aead_xform.aead.key.length =
852 					test_vector->aead_key.length;
853 		aead_xform.aead.iv.length = test_vector->aead_iv.length;
854 
855 		aead_xform.aead.digest_length = options->digest_sz;
856 		aead_xform.aead.aad_length =
857 					options->aead_aad_sz;
858 
859 		/* Create crypto session */
860 		rte_cryptodev_sym_session_init(dev_id,
861 					sess, &aead_xform, priv_mp);
862 	}
863 
864 	return sess;
865 }
866 
867 int
868 cperf_get_op_functions(const struct cperf_options *options,
869 		struct cperf_op_fns *op_fns)
870 {
871 	memset(op_fns, 0, sizeof(struct cperf_op_fns));
872 
873 	op_fns->sess_create = cperf_create_session;
874 
875 	if (options->op_type == CPERF_ASYM_MODEX) {
876 		op_fns->populate_ops = cperf_set_ops_asym;
877 		return 0;
878 	}
879 
880 	if (options->op_type == CPERF_AEAD) {
881 		op_fns->populate_ops = cperf_set_ops_aead;
882 		return 0;
883 	}
884 
885 	if (options->op_type == CPERF_AUTH_THEN_CIPHER
886 			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
887 		op_fns->populate_ops = cperf_set_ops_cipher_auth;
888 		return 0;
889 	}
890 	if (options->op_type == CPERF_AUTH_ONLY) {
891 		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
892 			op_fns->populate_ops = cperf_set_ops_null_auth;
893 		else
894 			op_fns->populate_ops = cperf_set_ops_auth;
895 		return 0;
896 	}
897 	if (options->op_type == CPERF_CIPHER_ONLY) {
898 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
899 			op_fns->populate_ops = cperf_set_ops_null_cipher;
900 		else
901 			op_fns->populate_ops = cperf_set_ops_cipher;
902 		return 0;
903 	}
904 #ifdef RTE_LIB_SECURITY
905 	if (options->op_type == CPERF_PDCP) {
906 		op_fns->populate_ops = cperf_set_ops_security;
907 		return 0;
908 	}
909 	if (options->op_type == CPERF_DOCSIS) {
910 		op_fns->populate_ops = cperf_set_ops_security;
911 		return 0;
912 	}
913 #endif
914 	return -1;
915 }
916