/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>
#include <rte_ether.h>
#ifdef RTE_LIBRTE_SECURITY
/* rte_security_*() is used below; may already be pulled in via cperf headers */
#include <rte_security.h>
#endif

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

#ifdef RTE_LIBRTE_SECURITY
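/*
 * Populate ops for a lookaside-protocol (PDCP/DOCSIS) security session:
 * attach the security session, point m_src (and m_dst, if out-of-place)
 * into the op's mempool element, write the per-packet HFN at iv_offset
 * (PDCP uses it when the session-based HFN is disabled) and, for DOCSIS,
 * set the auth/cipher regions so the header is skipped and the CRC is
 * excluded from the authenticated data.
 */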
static int
cperf_set_ops_security(struct rte_crypto_op **ops,
		uint32_t src_buf_offset,
		uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		struct rte_security_session *sec_sess =
			(struct rte_security_session *)sess;
		uint32_t buf_sz;

		uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
					uint32_t *, iv_offset);
		*per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_security_attach_session(ops[i], sec_sess);
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		if (options->op_type == CPERF_PDCP) {
			sym_op->m_src->buf_len = options->segment_sz;
			sym_op->m_src->data_len = options->test_buffer_size;
			sym_op->m_src->pkt_len = sym_op->m_src->data_len;
		}

		if (options->op_type == CPERF_DOCSIS) {
			if (options->imix_distribution_count) {
				buf_sz = options->imix_buffer_sizes[*imix_idx];
				*imix_idx = (*imix_idx + 1) % options->pool_sz;
			} else
				buf_sz = options->test_buffer_size;

			sym_op->m_src->buf_len = options->segment_sz;
			sym_op->m_src->data_len = buf_sz;
			sym_op->m_src->pkt_len = buf_sz;

			/* DOCSIS header is not CRC'ed */
			sym_op->auth.data.offset = options->docsis_hdr_sz;
			sym_op->auth.data.length = buf_sz -
				sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
			/*
			 * DOCSIS header and SRC and DST MAC addresses are not
			 * ciphered
			 */
			sym_op->cipher.data.offset = sym_op->auth.data.offset +
				RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
			sym_op->cipher.data.length = buf_sz -
				sym_op->cipher.data.offset;
		}

		/* In-place (dst_buf_offset == 0): no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);
	}

	return 0;
}
#endif

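/*
 * Populate ops for the NULL-cipher path: attach the crypto session, set
 * m_src/m_dst and take the cipher length from the IMIX distribution or
 * the fixed test buffer size. No IV or key material is involved.
 */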
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* In-place (dst_buf_offset == 0): no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}

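/*
 * Populate ops for the NULL-auth path: same as the NULL-cipher case but
 * the length/offset go into the auth fields; no digest is needed.
 */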
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* In-place (dst_buf_offset == 0): no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* auth parameters */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}

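/*
 * Populate ops for cipher-only tests. Lengths are converted to bits for
 * SNOW3G/KASUMI/ZUC, which expect bit-level lengths; for verify-type
 * tests the cipher IV from the test vector is copied into each op so
 * the output can be checked against known data.
 */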
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* In-place (dst_buf_offset == 0): no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}

	return 0;
}

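/*
 * Populate ops for auth-only tests. On generate, the digest is written
 * after the payload in the source (or destination, if out-of-place)
 * mbuf, walking the segment chain and spilling into the next segment
 * when the current one cannot hold it; on verify, the test vector's
 * digest is referenced instead.
 */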
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* In-place (dst_buf_offset == 0): no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
	return 0;
}

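/*
 * Populate ops for chained cipher+auth tests: combines the cipher-only
 * and auth-only setup above on the same op, with bit-length conversion
 * for the wireless algorithms and test-vector IVs copied in for
 * verify-type tests.
 */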
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* In-place (dst_buf_offset == 0): no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * The auth IV is stored right after the
				 * cipher IV, so skip past the cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}

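/*
 * Populate ops for AEAD tests. The AAD lives in the op's private data
 * right after the IV (16-byte aligned). On encrypt, the digest lands
 * after the payload in the mbuf chain; on decrypt, the test vector's
 * digest is referenced. AES-CCM is special-cased: per the DPDK API the
 * nonce starts one byte into the IV field and the AAD 18 bytes into the
 * AAD field, leaving room for the flags byte and B0 block.
 */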
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* In-place (dst_buf_offset == 0): no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		if (options->imix_distribution_count) {
			sym_op->aead.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * If doing AES-CCM, nonce is copied one byte
			 * after the start of IV field, and AAD is copied
			 * 18 bytes after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}

	return 0;
}

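/*
 * Create the session for the configured op type: an rte_security session
 * for PDCP/DOCSIS (returned as an opaque rte_cryptodev_sym_session
 * pointer), otherwise a symmetric crypto session initialised with
 * cipher/auth/AEAD transforms built from the test options and vector.
 */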
static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
	struct rte_mempool *priv_mp,
	uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector,
	uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

#ifdef RTE_LIBRTE_SECURITY
	/*
	 * security only
	 */
	if (options->op_type == CPERF_PDCP) {
		/* Setup Cipher Parameters */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		cipher_xform.cipher.iv.length = 4;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data = test_vector->cipher_key.data;
			cipher_xform.cipher.key.length = test_vector->cipher_key.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		if (options->auth_algo != 0) {
			auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
			auth_xform.next = NULL;
			auth_xform.auth.algo = options->auth_algo;
			auth_xform.auth.op = options->auth_op;
			auth_xform.auth.iv.offset = iv_offset +
				cipher_xform.cipher.iv.length;

			/* auth other than null */
			if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
				auth_xform.auth.digest_length = options->digest_sz;
				auth_xform.auth.key.length = test_vector->auth_key.length;
				auth_xform.auth.key.data = test_vector->auth_key.data;
				auth_xform.auth.iv.length = test_vector->auth_iv.length;
			} else {
				auth_xform.auth.digest_length = 0;
				auth_xform.auth.key.length = 0;
				auth_xform.auth.key.data = NULL;
				auth_xform.auth.iv.length = 0;
			}

			cipher_xform.next = &auth_xform;
		} else {
			cipher_xform.next = NULL;
		}

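		/*
		 * Fixed PDCP fields below (bearer, packet direction, HFN
		 * threshold) are arbitrary test values; only the domain,
		 * SN size and HFN mode come from the command line.
		 */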
		struct rte_security_session_conf sess_conf = {
			.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_PDCP,
			{.pdcp = {
				.bearer = 0x16,
				.domain = options->pdcp_domain,
				.pkt_dir = 0,
				.sn_size = options->pdcp_sn_sz,
				.hfn = options->pdcp_ses_hfn_en ?
					PDCP_DEFAULT_HFN : 0,
				.hfn_threshold = 0x70C0A,
				.hfn_ovrd = !(options->pdcp_ses_hfn_en),
			} },
			.crypto_xform = &cipher_xform
		};

		struct rte_security_ctx *ctx = (struct rte_security_ctx *)
					rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx,
					&sess_conf, sess_mp);
	}
	if (options->op_type == CPERF_DOCSIS) {
		enum rte_security_docsis_direction direction;

		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
				test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
				test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
				test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
			direction = RTE_SECURITY_DOCSIS_DOWNLINK;
		else
			direction = RTE_SECURITY_DOCSIS_UPLINK;

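		/*
		 * For DOCSIS, encryption maps to the downlink direction
		 * and decryption to the uplink direction; the direction
		 * is the only protocol-specific field in the config.
		 */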
		struct rte_security_session_conf sess_conf = {
			.action_type =
				RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
			{.docsis = {
				.direction = direction,
			} },
			.crypto_xform = &cipher_xform
		};
		struct rte_security_ctx *ctx = (struct rte_security_ctx *)
					rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx,
					&sess_conf, priv_mp);
	}
#endif
	sess = rte_cryptodev_sym_session_create(sess_mp);
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
				priv_mp);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
				priv_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset +
			cipher_xform.cipher.iv.length;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &cipher_xform, priv_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &auth_xform, priv_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
					test_vector->aead_key.data;
		aead_xform.aead.key.length =
					test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
					options->aead_aad_sz;

		/* Create crypto session */
		rte_cryptodev_sym_session_init(dev_id,
					sess, &aead_xform, priv_mp);
	}

	return sess;
}

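/*
 * Pick the session-create and op-populate callbacks matching the
 * configured op type; returns -1 for an unsupported op type.
 */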
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD) {
		op_fns->populate_ops = cperf_set_ops_aead;
		return 0;
	}

	if (options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}
#ifdef RTE_LIBRTE_SECURITY
	if (options->op_type == CPERF_PDCP) {
		op_fns->populate_ops = cperf_set_ops_security;
		return 0;
	}
	if (options->op_type == CPERF_DOCSIS) {
		op_fns->populate_ops = cperf_set_ops_security;
		return 0;
	}
#endif
	return -1;
}