/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

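/*
 * Populate ops for the NULL cipher case: attach the session, resolve
 * src/dst mbuf pointers from their offsets relative to the crypto op,
 * and set the cipher data length/offset. No key or IV is required.
 */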
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if operation is in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}

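/*
 * Populate ops for the NULL auth case: same mbuf setup as above, but
 * filling the auth data length/offset instead of the cipher fields.
 */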
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if operation is in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* auth parameters */
		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}

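/*
 * Populate cipher-only ops. For the verify test type, the reference
 * IV from the test vector is copied into each op so the output can be
 * checked against known ciphertext.
 */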
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if operation is in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
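		/* SNOW3G/KASUMI/ZUC ciphers expect the data length in bits */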
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);

		}
	}

	return 0;
}

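/*
 * Populate auth-only ops. For digest generation, the digest is placed
 * at the end of the plaintext, walking chained mbuf segments to find a
 * segment with enough contiguous room; for verification, the digest
 * comes from the test vector.
 */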
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if operation is in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {

			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in this segment,
				 * place the digest in the next segment.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);

		}

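		/* SNOW3G/KASUMI/ZUC auth algorithms expect the data length in bits */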
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
	return 0;
}

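/*
 * Populate chained cipher + auth ops; combines the cipher and auth
 * setup above. For the verify test type, the auth IV (if any) is
 * copied directly after the cipher IV.
 */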
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if operation is in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {

			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in this segment,
				 * place the digest in the next segment.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy the auth IV right after the
				 * cipher IV, which follows the crypto
				 * operation
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}

	}

	return 0;
}

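/*
 * Populate AEAD ops. The AAD sits in the op's private data, after the
 * IV rounded up to a 16-byte boundary. AES-CCM has special layout
 * requirements: the nonce starts one byte into the IV field and the
 * AAD starts 18 bytes into the AAD buffer.
 */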
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if operation is in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {

			uint32_t offset = sym_op->aead.data.length +
						sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in this segment,
				 * place the digest in the next segment.
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * For AES-CCM, the nonce is copied one byte
			 * after the start of the IV field, and the AAD
			 * is copied 18 bytes after the start of the
			 * AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}

	return 0;
}

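/*
 * Build the transform chain described by the test options (cipher
 * only, auth only, cipher+auth in either order, or AEAD) and
 * initialize a symmetric session for the given device.
 */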
static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
	uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector,
	uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	sess = rte_cryptodev_sym_session_create(sess_mp);
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher algorithm other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
				sess_mp);
	/*
	 *  auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth algorithm other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher algorithm other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth algorithm other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
					test_vector->aead_key.data;
		aead_xform.aead.key.length =
					test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
					options->aead_aad_sz;

		/* Create crypto session */
		rte_cryptodev_sym_session_init(dev_id,
					sess, &aead_xform, sess_mp);
	}

	return sess;
}

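/*
 * Select the populate_ops callback that matches the requested
 * operation type; returns -1 if the type is unknown.
 */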
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD) {
		op_fns->populate_ops = cperf_set_ops_aead;
		return 0;
	}

	if (options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}

	return -1;
}