/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* A dst_buf_offset of 0 means in-place: no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}

static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* A dst_buf_offset of 0 means in-place: no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* auth parameters */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}
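/*
 * Populate ops for a cipher-only test. Besides the common mbuf/session
 * setup, the data length is converted from bytes to bits for the wireless
 * algorithms (SNOW 3G UEA2, KASUMI F8, ZUC EEA3), and for verify tests the
 * cipher IV from the test vector is copied into each op's private data.
 */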
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* A dst_buf_offset of 0 means in-place: no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		/* SNOW 3G, KASUMI and ZUC ciphers expect the length in bits */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}

	return 0;
}

static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* A dst_buf_offset of 0 means in-place: no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in the segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		/* SNOW 3G, KASUMI and ZUC hashes expect the length in bits */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
	return 0;
}
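/*
 * Populate ops for a chained cipher + auth test: both the cipher and auth
 * regions span the full test buffer, the digest is taken from the test
 * vector when verifying or placed in-line after the payload otherwise, and
 * for verify tests the cipher IV (and any auth IV) are copied into each op.
 */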
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* A dst_buf_offset of 0 means in-place: no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		/* SNOW 3G, KASUMI and ZUC ciphers expect the length in bits */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in the segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		/* SNOW 3G, KASUMI and ZUC hashes expect the length in bits */
		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * The auth IV is stored in the op's private
				 * data right after the cipher IV
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}
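/*
 * Populate ops for an AEAD test. The AAD lives in the op's private data,
 * right after the 16-byte aligned AEAD IV; the digest either comes from
 * the test vector (decrypt) or is placed just after the AEAD data in the
 * mbuf chain (encrypt).
 */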
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* A dst_buf_offset of 0 means in-place: no separate dest mbuf */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		if (options->imix_distribution_count) {
			sym_op->aead.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = sym_op->aead.data.length +
					sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in the segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * For AES-CCM, the nonce is copied one byte after
			 * the start of the IV field, and the AAD is copied
			 * 18 bytes after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
					test_vector->aad.data,
					test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
					test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
					test_vector->aad.data,
					test_vector->aad.length);
			}
		}
	}

	return 0;
}
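/*
 * Build the symmetric session for the configured operation type: a single
 * cipher or auth transform, a cipher+auth (or auth+cipher) chain, or an
 * AEAD transform, keyed with data from the test vector.
 */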
static struct rte_cryptodev_sym_session *
cperf_create_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	sess = rte_cryptodev_sym_session_create(sess_mp);
	/* session allocation can fail if the mempool is exhausted */
	if (sess == NULL)
		return NULL;
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher algorithm other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &cipher_xform,
				sess_mp);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth algorithm other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		rte_cryptodev_sym_session_init(dev_id, sess, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher algorithm other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth algorithm other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			rte_cryptodev_sym_session_init(dev_id,
					sess, &auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data =
				test_vector->aead_key.data;
		aead_xform.aead.key.length =
				test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length =
				options->aead_aad_sz;

		/* Create crypto session */
		rte_cryptodev_sym_session_init(dev_id,
				sess, &aead_xform, sess_mp);
	}

	return sess;
}
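/*
 * Select the session-create and op-populate callbacks matching the
 * requested operation type; the NULL algorithms use the lighter-weight
 * "null" populate helpers.
 */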
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD) {
		op_fns->populate_ops = cperf_set_ops_aead;
		return 0;
	}

	if (options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}

	return -1;
}
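/*
 * Usage sketch (illustrative only, not part of this file): how a perf test
 * runner might use the callbacks resolved above. The surrounding setup
 * (ops array, session mempool, burst_size, imix_idx, queue pair 0) is
 * assumed here for illustration; only cperf_get_op_functions(),
 * struct cperf_op_fns and the sess_create/populate_ops signatures come
 * from cperf_ops.h.
 *
 *	struct cperf_op_fns fns;
 *	uint32_t imix_idx = 0;
 *
 *	if (cperf_get_op_functions(options, &fns) != 0)
 *		return -1;
 *
 *	sess = fns.sess_create(sess_mp, dev_id, options, test_vector,
 *			iv_offset);
 *
 *	fns.populate_ops(ops, src_buf_offset, dst_buf_offset, burst_size,
 *			sess, options, test_vector, iv_offset, &imix_idx);
 *
 *	rte_cryptodev_enqueue_burst(dev_id, 0, ops, burst_size);
 */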