xref: /dpdk/app/test-crypto-perf/cperf_ops.c (revision e9fd1ebf981f361844aea9ec94e17f4bda5e1479)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2016-2017 Intel Corporation
3  */
4 
5 #include <rte_cryptodev.h>
6 #include <rte_ether.h>
7 #include <rte_ip.h>
8 
9 #include "cperf_ops.h"
10 #include "cperf_test_vectors.h"
11 
12 static void
13 cperf_set_ops_asym(struct rte_crypto_op **ops,
14 		   uint32_t src_buf_offset __rte_unused,
15 		   uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
16 		   void *sess,
17 		   const struct cperf_options *options,
18 		   const struct cperf_test_vector *test_vector __rte_unused,
19 		   uint16_t iv_offset __rte_unused,
20 		   uint32_t *imix_idx __rte_unused,
21 		   uint64_t *tsc_start __rte_unused)
22 {
23 	uint16_t i;
24 	void *asym_sess = (void *)sess;
25 
26 	for (i = 0; i < nb_ops; i++) {
27 		struct rte_crypto_asym_op *asym_op = ops[i]->asym;
28 
29 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
30 		asym_op->modex.base.data = options->modex_data->base.data;
31 		asym_op->modex.base.length = options->modex_data->base.len;
32 		asym_op->modex.result.data = options->modex_data->result.data;
33 		asym_op->modex.result.length = options->modex_data->result.len;
34 		rte_crypto_op_attach_asym_session(ops[i], asym_sess);
35 	}
36 }
37 
38 #ifdef RTE_LIB_SECURITY
39 static void
40 test_ipsec_vec_populate(struct rte_mbuf *m, const struct cperf_options *options,
41 			const struct cperf_test_vector *test_vector)
42 {
43 	struct rte_ipv4_hdr *ip = rte_pktmbuf_mtod(m, struct rte_ipv4_hdr *);
44 
45 	if (options->is_outbound) {
46 		memcpy(ip, test_vector->plaintext.data,
47 		       sizeof(struct rte_ipv4_hdr));
48 
49 		ip->total_length = rte_cpu_to_be_16(m->data_len);
50 	}
51 }
52 
53 static void
54 cperf_set_ops_security(struct rte_crypto_op **ops,
55 		uint32_t src_buf_offset __rte_unused,
56 		uint32_t dst_buf_offset __rte_unused,
57 		uint16_t nb_ops, void *sess,
58 		const struct cperf_options *options,
59 		const struct cperf_test_vector *test_vector,
60 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
61 		uint64_t *tsc_start)
62 {
63 	uint16_t i;
64 
65 	for (i = 0; i < nb_ops; i++) {
66 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
67 		void *sec_sess = (void *)sess;
68 		uint32_t buf_sz;
69 
70 		uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
71 					uint32_t *, iv_offset);
72 		*per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;
73 
74 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
75 		rte_security_attach_session(ops[i], sec_sess);
76 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
77 							src_buf_offset);
78 
79 		if (options->op_type == CPERF_PDCP) {
80 			sym_op->m_src->buf_len = options->segment_sz;
81 			sym_op->m_src->data_len = options->test_buffer_size;
82 			sym_op->m_src->pkt_len = sym_op->m_src->data_len;
83 		}
84 
85 		if (options->op_type == CPERF_DOCSIS) {
86 			if (options->imix_distribution_count) {
87 				buf_sz = options->imix_buffer_sizes[*imix_idx];
88 				*imix_idx = (*imix_idx + 1) % options->pool_sz;
89 			} else
90 				buf_sz = options->test_buffer_size;
91 
92 			sym_op->m_src->buf_len = options->segment_sz;
93 			sym_op->m_src->data_len = buf_sz;
94 			sym_op->m_src->pkt_len = buf_sz;
95 
96 			/* DOCSIS header is not CRC'ed */
97 			sym_op->auth.data.offset = options->docsis_hdr_sz;
98 			sym_op->auth.data.length = buf_sz -
99 				sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
100 			/*
101 			 * DOCSIS header and SRC and DST MAC addresses are not
102 			 * ciphered
103 			 */
104 			sym_op->cipher.data.offset = sym_op->auth.data.offset +
105 				RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
106 			sym_op->cipher.data.length = buf_sz -
107 				sym_op->cipher.data.offset;
108 		}
109 
110 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
111 		if (dst_buf_offset == 0)
112 			sym_op->m_dst = NULL;
113 		else
114 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
115 							dst_buf_offset);
116 	}
117 
118 	RTE_SET_USED(tsc_start);
119 	RTE_SET_USED(test_vector);
120 }
121 
122 static void
123 cperf_set_ops_security_ipsec(struct rte_crypto_op **ops,
124 		uint32_t src_buf_offset __rte_unused,
125 		uint32_t dst_buf_offset __rte_unused,
126 		uint16_t nb_ops, void *sess,
127 		const struct cperf_options *options,
128 		const struct cperf_test_vector *test_vector,
129 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
130 		uint64_t *tsc_start)
131 {
132 	void *sec_sess = sess;
133 	const uint32_t test_buffer_size = options->test_buffer_size;
134 	const uint32_t headroom_sz = options->headroom_sz;
135 	const uint32_t segment_sz = options->segment_sz;
136 	uint64_t tsc_start_temp, tsc_end_temp;
137 	uint16_t i = 0;
138 
139 	RTE_SET_USED(imix_idx);
140 
141 	for (i = 0; i < nb_ops; i++) {
142 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
143 		struct rte_mbuf *m = sym_op->m_src;
144 
145 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
146 		rte_security_attach_session(ops[i], sec_sess);
147 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
148 							src_buf_offset);
149 
150 		/* In case of IPsec, headroom is consumed by PMD,
151 		 * hence resetting it.
152 		 */
153 		m->data_off = headroom_sz;
154 
155 		m->buf_len = segment_sz;
156 		m->data_len = test_buffer_size;
157 		m->pkt_len = test_buffer_size;
158 
159 		sym_op->m_dst = NULL;
160 	}
161 
162 	if (options->test_file != NULL)
163 		return;
164 
165 	tsc_start_temp = rte_rdtsc_precise();
166 
167 	for (i = 0; i < nb_ops; i++) {
168 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
169 		struct rte_mbuf *m = sym_op->m_src;
170 
171 		test_ipsec_vec_populate(m, options, test_vector);
172 	}
173 
174 	tsc_end_temp = rte_rdtsc_precise();
175 	*tsc_start += tsc_end_temp - tsc_start_temp;
176 }
177 
178 static void
179 cperf_set_ops_security_tls(struct rte_crypto_op **ops,
180 		uint32_t src_buf_offset __rte_unused,
181 		uint32_t dst_buf_offset __rte_unused,
182 		uint16_t nb_ops, void *sess,
183 		const struct cperf_options *options,
184 		const struct cperf_test_vector *test_vector,
185 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
186 		uint64_t *tsc_start)
187 {
188 	const uint32_t test_buffer_size = options->test_buffer_size;
189 	const uint32_t headroom_sz = options->headroom_sz;
190 	const uint32_t segment_sz = options->segment_sz;
191 	uint16_t i = 0;
192 
193 	RTE_SET_USED(imix_idx);
194 	RTE_SET_USED(tsc_start);
195 	RTE_SET_USED(test_vector);
196 
197 	for (i = 0; i < nb_ops; i++) {
198 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
199 		struct rte_mbuf *m = sym_op->m_src;
200 
201 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
202 		ops[i]->param1.tls_record.content_type = 0x17;
203 		rte_security_attach_session(ops[i], sess);
204 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] + src_buf_offset);
205 
206 		m->data_off = headroom_sz;
207 		m->buf_len = segment_sz;
208 		m->data_len = test_buffer_size;
209 		m->pkt_len = test_buffer_size;
210 
211 		sym_op->m_dst = NULL;
212 	}
213 }
214 #endif
215 
216 static void
217 cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
218 		uint32_t src_buf_offset, uint32_t dst_buf_offset,
219 		uint16_t nb_ops, void *sess,
220 		const struct cperf_options *options,
221 		const struct cperf_test_vector *test_vector __rte_unused,
222 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
223 		uint64_t *tsc_start __rte_unused)
224 {
225 	uint16_t i;
226 
227 	for (i = 0; i < nb_ops; i++) {
228 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
229 
230 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
231 		rte_crypto_op_attach_sym_session(ops[i], sess);
232 
233 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
234 							src_buf_offset);
235 
236 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
237 		if (dst_buf_offset == 0)
238 			sym_op->m_dst = NULL;
239 		else
240 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
241 							dst_buf_offset);
242 
243 		/* cipher parameters */
244 		if (options->imix_distribution_count) {
245 			sym_op->cipher.data.length =
246 				options->imix_buffer_sizes[*imix_idx];
247 			*imix_idx = (*imix_idx + 1) % options->pool_sz;
248 		} else
249 			sym_op->cipher.data.length = options->test_buffer_size;
250 		sym_op->cipher.data.offset = 0;
251 	}
252 }
253 
254 static void
255 cperf_set_ops_null_auth(struct rte_crypto_op **ops,
256 		uint32_t src_buf_offset, uint32_t dst_buf_offset,
257 		uint16_t nb_ops, void *sess,
258 		const struct cperf_options *options,
259 		const struct cperf_test_vector *test_vector __rte_unused,
260 		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
261 		uint64_t *tsc_start __rte_unused)
262 {
263 	uint16_t i;
264 
265 	for (i = 0; i < nb_ops; i++) {
266 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
267 
268 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
269 		rte_crypto_op_attach_sym_session(ops[i], sess);
270 
271 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
272 							src_buf_offset);
273 
274 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
275 		if (dst_buf_offset == 0)
276 			sym_op->m_dst = NULL;
277 		else
278 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
279 							dst_buf_offset);
280 
281 		/* auth parameters */
282 		if (options->imix_distribution_count) {
283 			sym_op->auth.data.length =
284 				options->imix_buffer_sizes[*imix_idx];
285 			*imix_idx = (*imix_idx + 1) % options->pool_sz;
286 		} else
287 			sym_op->auth.data.length = options->test_buffer_size;
288 		sym_op->auth.data.offset = 0;
289 	}
290 }
291 
292 static void
293 cperf_set_ops_cipher(struct rte_crypto_op **ops,
294 		uint32_t src_buf_offset, uint32_t dst_buf_offset,
295 		uint16_t nb_ops, void *sess,
296 		const struct cperf_options *options,
297 		const struct cperf_test_vector *test_vector,
298 		uint16_t iv_offset, uint32_t *imix_idx,
299 		uint64_t *tsc_start __rte_unused)
300 {
301 	uint16_t i;
302 
303 	for (i = 0; i < nb_ops; i++) {
304 		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
305 
306 		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
307 		rte_crypto_op_attach_sym_session(ops[i], sess);
308 
309 		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
310 							src_buf_offset);
311 
312 		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
313 		if (dst_buf_offset == 0)
314 			sym_op->m_dst = NULL;
315 		else
316 			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
317 							dst_buf_offset);
318 
319 		/* cipher parameters */
320 		if (options->imix_distribution_count) {
321 			sym_op->cipher.data.length =
322 				options->imix_buffer_sizes[*imix_idx];
323 			*imix_idx = (*imix_idx + 1) % options->pool_sz;
324 		} else
325 			sym_op->cipher.data.length = options->test_buffer_size;
326 
327 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
328 				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
329 				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
330 			sym_op->cipher.data.length <<= 3;
331 
332 		sym_op->cipher.data.offset = 0;
333 	}
334 
335 	if (options->test == CPERF_TEST_TYPE_VERIFY) {
336 		for (i = 0; i < nb_ops; i++) {
337 			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
338 					uint8_t *, iv_offset);
339 
340 			memcpy(iv_ptr, test_vector->cipher_iv.data,
341 					test_vector->cipher_iv.length);
342 
343 		}
344 	}
345 }
346 
/*
 * Prepare nb_ops ops for auth-only tests: attach the session, wire up the
 * src/dst mbufs, choose where the digest lives and set the auth region.
 */
static void
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* src/dst mbufs live at fixed offsets from the op itself. */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			/* Auth IV lives in the op's private area at iv_offset. */
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			/* Verify against the precomputed reference digest. */
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Generate: place the digest right after the payload,
			 * walking the segment chain to the segment that holds
			 * byte test_buffer_size.
			 */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);

		}

		/* Auth length: per-packet IMIX size or the fixed test size. */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		/* Wireless algorithms express the auth length in bits. */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		/* NOTE(review): this re-copies the auth IV already written in
		 * the main loop above; harmless but redundant.
		 */
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
}
446 
/*
 * Prepare nb_ops ops for chained cipher+auth tests: attach the session, wire
 * up the src/dst mbufs, set the cipher region, choose the digest location
 * and set the auth region.
 */
static void
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* src/dst mbufs live at fixed offsets from the op itself. */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		/* Auth-then-cipher generate mode also ciphers the digest. */
		if ((options->auth_op == RTE_CRYPTO_AUTH_OP_GENERATE) &&
				(options->op_type == CPERF_AUTH_THEN_CIPHER))
			sym_op->cipher.data.length += options->digest_sz;

		/* Wireless algorithms express the cipher length in bits. */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			/* Verify against the precomputed reference digest. */
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Generate: place the digest right after the payload,
			 * walking the segment chain to the segment that holds
			 * byte test_buffer_size.
			 */
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		/* Auth length: per-packet IMIX size or the fixed test size. */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		/* Wireless algorithms express the auth length in bits. */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		/* Verify runs use the reference IVs from the test vector. */
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy IV after the crypto operation and
				 * the cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}

	}
}
564 
/*
 * Prepare nb_ops ops for AEAD tests: attach the session, wire up the src/dst
 * mbufs, set the AEAD region, AAD pointers and digest location.
 */
static void
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		/* src/dst mbufs live at fixed offsets from the op itself. */
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if out-of-place (dst_buf_offset = 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		if (options->imix_distribution_count) {
			sym_op->aead.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		/* AAD lives in the op's private area, after the aligned IV. */
		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			/* Decrypt verifies the precomputed reference digest. */
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			/* Encrypt: place the digest right after the AEAD data,
			 * walking the segment chain to the segment holding it.
			 */
			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	/* Latency runs also need real IV/AAD so results are meaningful. */
	if ((options->test == CPERF_TEST_TYPE_VERIFY) ||
			(options->test == CPERF_TEST_TYPE_LATENCY)) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * If doing AES-CCM, nonce is copied one byte
			 * after the start of IV field, and AAD is copied
			 * 18 bytes after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}
}
675 
/*
 * Build the crypto transform chain (either a single AEAD xform, or a
 * cipher+auth pair) from the test options and create a lookaside-protocol
 * IPsec ESP tunnel security session on @dev_id.
 *
 * Returns the security session, or NULL when neither an AEAD algorithm nor
 * a cipher/auth pair is configured.
 */
static void *
create_ipsec_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform auth_xform = {0};
	struct rte_crypto_sym_xform *crypto_xform;
	struct rte_crypto_sym_xform xform = {0};

	if (options->aead_algo != 0) {
		/* Setup AEAD Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		xform.next = NULL;
		xform.aead.algo = options->aead_algo;
		xform.aead.op = options->aead_op;
		xform.aead.iv.offset = iv_offset;
		xform.aead.key.data = test_vector->aead_key.data;
		xform.aead.key.length = test_vector->aead_key.length;
		xform.aead.iv.length = test_vector->aead_iv.length;
		xform.aead.digest_length = options->digest_sz;
		xform.aead.aad_length = options->aead_aad_sz;
		crypto_xform = &xform;
	} else if (options->cipher_algo != 0 && options->auth_algo != 0) {
		/* Setup Cipher Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		xform.cipher.algo = options->cipher_algo;
		xform.cipher.op = options->cipher_op;
		xform.cipher.iv.offset = iv_offset;
		xform.cipher.iv.length = test_vector->cipher_iv.length;
		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			xform.cipher.key.data = test_vector->cipher_key.data;
			xform.cipher.key.length =
				test_vector->cipher_key.length;
		} else {
			xform.cipher.key.data = NULL;
			xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		/* Auth IV follows the cipher IV in the op's private area. */
		auth_xform.auth.iv.offset = iv_offset +
				xform.cipher.iv.length;
		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.key.length =
						test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* Outbound chains cipher->auth, inbound auth->cipher. */
		if (options->is_outbound) {
			crypto_xform = &xform;
			xform.next = &auth_xform;
			auth_xform.next = NULL;
		} else {
			crypto_xform = &auth_xform;
			auth_xform.next = &xform;
			xform.next = NULL;
		}
	} else {
		return NULL;
	}

#define CPERF_IPSEC_SRC_IP	0x01010101
#define CPERF_IPSEC_DST_IP	0x02020202
#define CPERF_IPSEC_SALT	0x0
#define CPERF_IPSEC_DEFTTL	64
	struct rte_security_ipsec_tunnel_param tunnel = {
		.type = RTE_SECURITY_IPSEC_TUNNEL_IPV4,
		{.ipv4 = {
			.src_ip = { .s_addr = CPERF_IPSEC_SRC_IP},
			.dst_ip = { .s_addr = CPERF_IPSEC_DST_IP},
			.dscp = 0,
			.df = 0,
			.ttl = CPERF_IPSEC_DEFTTL,
		} },
	};
	struct rte_security_session_conf sess_conf = {
		.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
		.protocol = RTE_SECURITY_PROTOCOL_IPSEC,
		{.ipsec = {
			.spi = rte_lcore_id() + 1,
			/**< For testing sake, lcore_id is taken as SPI so that
			 * for every core a different session is created.
			 */
			.salt = CPERF_IPSEC_SALT,
			.options = { 0 },
			.replay_win_sz = 0,
			.proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
			.mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
			.tunnel = tunnel,
		} },
		.userdata = NULL,
		.crypto_xform = crypto_xform,
	};

	if (options->is_outbound)
		sess_conf.ipsec.direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS;
	else
		sess_conf.ipsec.direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS;

	void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

	/* Create security session */
	return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
}
793 
/*
 * Build the crypto transform chain (either a single AEAD xform, or a
 * cipher+auth pair) from the test options and create a lookaside-protocol
 * TLS record security session on @dev_id.
 *
 * Returns the security session, or NULL when neither an AEAD algorithm nor
 * a cipher/auth pair is configured.
 */
static void *
create_tls_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform auth_xform = {0};
	struct rte_crypto_sym_xform *crypto_xform;
	struct rte_crypto_sym_xform xform = {0};

	if (options->aead_algo != 0) {
		/* Setup AEAD Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		xform.next = NULL;
		xform.aead.algo = options->aead_algo;
		xform.aead.op = options->aead_op;
		xform.aead.iv.offset = iv_offset;
		xform.aead.key.data = test_vector->aead_key.data;
		xform.aead.key.length = test_vector->aead_key.length;
		xform.aead.iv.length = test_vector->aead_iv.length;
		xform.aead.digest_length = options->digest_sz;
		xform.aead.aad_length = options->aead_aad_sz;
		crypto_xform = &xform;
	} else if (options->cipher_algo != 0 && options->auth_algo != 0) {
		/* Setup Cipher Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		xform.cipher.algo = options->cipher_algo;
		xform.cipher.op = options->cipher_op;
		xform.cipher.iv.offset = iv_offset;
		xform.cipher.iv.length = test_vector->cipher_iv.length;
		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			xform.cipher.key.data = test_vector->cipher_key.data;
			xform.cipher.key.length = test_vector->cipher_key.length;
		} else {
			xform.cipher.key.data = NULL;
			xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		/* Auth IV follows the cipher IV in the op's private area. */
		auth_xform.auth.iv.offset = iv_offset + xform.cipher.iv.length;
		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.key.length = test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		if (options->is_outbound) {
			/* Currently supporting AUTH then Encrypt mode only for TLS. */
			crypto_xform = &auth_xform;
			auth_xform.next = &xform;
			xform.next = NULL;
		} else {
			crypto_xform = &xform;
			xform.next = &auth_xform;
			auth_xform.next = NULL;
		}
	} else {
		return NULL;
	}

	struct rte_security_tls_record_sess_options opts = {
		.iv_gen_disable = 0,
		.extra_padding_enable = 0,
	};
	struct rte_security_session_conf sess_conf = {
		.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
		.protocol = RTE_SECURITY_PROTOCOL_TLS_RECORD,
		{.tls_record = {
			.ver = RTE_SECURITY_VERSION_TLS_1_2,
			.options = opts,
		} },
		.userdata = NULL,
		.crypto_xform = crypto_xform,
	};
	/* TLS 1.2 is the default; honour an explicit version override. */
	if (options->tls_version)
		sess_conf.tls_record.ver = options->tls_version;

	/* Outbound sessions write (protect) records, inbound read them. */
	if (options->is_outbound)
		sess_conf.tls_record.type = RTE_SECURITY_TLS_SESS_TYPE_WRITE;
	else
		sess_conf.tls_record.type = RTE_SECURITY_TLS_SESS_TYPE_READ;

	void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

	/* Create security session */
	return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
}
893 
894 static void *
cperf_create_session(struct rte_mempool *sess_mp,
	uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector,
	uint16_t iv_offset)
{
	/*
	 * Build the session object matching options->op_type:
	 *  - CPERF_ASYM_MODEX      -> asymmetric session (modexp),
	 *  - CPERF_PDCP/DOCSIS/IPSEC/TLS -> rte_security session
	 *    (only when RTE_LIB_SECURITY is compiled in),
	 *  - otherwise             -> symmetric session built from
	 *    cipher/auth/AEAD transform chains.
	 * Returns the opaque session pointer, or NULL on failure.
	 * Keys, IV lengths and digest sizes come from test_vector/options;
	 * iv_offset locates the IV inside the crypto op.
	 */
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	void *sess = NULL;
	void *asym_sess = NULL;
	struct rte_crypto_asym_xform xform = {0};
	int ret;

	/* Asymmetric modular exponentiation uses its own session API. */
	if (options->op_type == CPERF_ASYM_MODEX) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
		xform.modex.modulus.data = options->modex_data->modulus.data;
		xform.modex.modulus.length = options->modex_data->modulus.len;
		xform.modex.exponent.data = options->modex_data->exponent.data;
		xform.modex.exponent.length = options->modex_data->exponent.len;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "Asym session create failed\n");
			return NULL;
		}
		return asym_sess;
	}
#ifdef RTE_LIB_SECURITY
	/*
	 * security only
	 */
	if (options->op_type == CPERF_PDCP) {
		/* Setup Cipher Parameters */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		/* NOTE(review): fixed 4-byte cipher IV for PDCP — confirm
		 * this matches the PDCP count/IV convention used by the PMDs.
		 */
		cipher_xform.cipher.iv.length = 4;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data = test_vector->cipher_key.data;
			cipher_xform.cipher.key.length = test_vector->cipher_key.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		if (options->auth_algo != 0) {
			auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
			auth_xform.next = NULL;
			auth_xform.auth.algo = options->auth_algo;
			auth_xform.auth.op = options->auth_op;
			/* Auth IV is laid out right after the cipher IV. */
			auth_xform.auth.iv.offset = iv_offset +
				cipher_xform.cipher.iv.length;

			/* auth different than null */
			if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
				auth_xform.auth.digest_length = options->digest_sz;
				auth_xform.auth.key.length = test_vector->auth_key.length;
				auth_xform.auth.key.data = test_vector->auth_key.data;
				auth_xform.auth.iv.length = test_vector->auth_iv.length;
			} else {
				auth_xform.auth.digest_length = 0;
				auth_xform.auth.key.length = 0;
				auth_xform.auth.key.data = NULL;
				auth_xform.auth.iv.length = 0;
			}

			/* Chain: cipher -> auth. */
			cipher_xform.next = &auth_xform;
		} else {
			cipher_xform.next = NULL;
		}

		/*
		 * Lookaside-protocol PDCP session. Bearer and HFN threshold
		 * are fixed test values; per-op HFN override is enabled when
		 * the session-level HFN is disabled (they are mutually
		 * exclusive).
		 */
		struct rte_security_session_conf sess_conf = {
			.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_PDCP,
			{.pdcp = {
				.bearer = 0x16,
				.domain = options->pdcp_domain,
				.pkt_dir = 0,
				.sn_size = options->pdcp_sn_sz,
				.hfn = options->pdcp_ses_hfn_en ?
					PDCP_DEFAULT_HFN : 0,
				.hfn_threshold = 0x70C0A,
				.sdap_enabled = options->pdcp_sdap,
				.hfn_ovrd = !(options->pdcp_ses_hfn_en),
			} },
			.crypto_xform = &cipher_xform
		};

		void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
	}

	if (options->op_type == CPERF_IPSEC) {
		return create_ipsec_session(sess_mp, dev_id,
				options, test_vector, iv_offset);
	}

	if (options->op_type == CPERF_TLS) {
		return create_tls_session(sess_mp, dev_id,
				options, test_vector, iv_offset);
	}

	if (options->op_type == CPERF_DOCSIS) {
		enum rte_security_docsis_direction direction;

		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
				test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
				test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
				test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		cipher_xform.next = NULL;

		/* DOCSIS maps encryption to downlink, decryption to uplink. */
		if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
			direction = RTE_SECURITY_DOCSIS_DOWNLINK;
		else
			direction = RTE_SECURITY_DOCSIS_UPLINK;

		struct rte_security_session_conf sess_conf = {
			.action_type =
				RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
			{.docsis = {
				.direction = direction,
			} },
			.crypto_xform = &cipher_xform
		};
		void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
	}
#endif
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform,
				sess_mp);
	/*
	 *  auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		/* Auth IV follows the cipher IV in the op's IV area. */
		auth_xform.auth.iv.offset = iv_offset +
			cipher_xform.cipher.iv.length;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
					test_vector->aead_key.data;
		aead_xform.aead.key.length =
					test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
					options->aead_aad_sz;

		/* Create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &aead_xform,
				sess_mp);
	}

	return sess;
}
1192 
1193 int
1194 cperf_get_op_functions(const struct cperf_options *options,
1195 		struct cperf_op_fns *op_fns)
1196 {
1197 	memset(op_fns, 0, sizeof(struct cperf_op_fns));
1198 
1199 	op_fns->sess_create = cperf_create_session;
1200 
1201 	switch (options->op_type) {
1202 	case CPERF_AEAD:
1203 		op_fns->populate_ops = cperf_set_ops_aead;
1204 		break;
1205 
1206 	case CPERF_AUTH_THEN_CIPHER:
1207 	case CPERF_CIPHER_THEN_AUTH:
1208 		op_fns->populate_ops = cperf_set_ops_cipher_auth;
1209 		break;
1210 	case CPERF_AUTH_ONLY:
1211 		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
1212 			op_fns->populate_ops = cperf_set_ops_null_auth;
1213 		else
1214 			op_fns->populate_ops = cperf_set_ops_auth;
1215 		break;
1216 	case CPERF_CIPHER_ONLY:
1217 		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
1218 			op_fns->populate_ops = cperf_set_ops_null_cipher;
1219 		else
1220 			op_fns->populate_ops = cperf_set_ops_cipher;
1221 		break;
1222 	case CPERF_ASYM_MODEX:
1223 		op_fns->populate_ops = cperf_set_ops_asym;
1224 		break;
1225 #ifdef RTE_LIB_SECURITY
1226 	case CPERF_PDCP:
1227 	case CPERF_DOCSIS:
1228 		op_fns->populate_ops = cperf_set_ops_security;
1229 		break;
1230 	case CPERF_IPSEC:
1231 		op_fns->populate_ops = cperf_set_ops_security_ipsec;
1232 		break;
1233 	case CPERF_TLS:
1234 		op_fns->populate_ops = cperf_set_ops_security_tls;
1235 		break;
1236 #endif
1237 	default:
1238 		return -1;
1239 	}
1240 
1241 	return 0;
1242 }
1243