/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#include <rte_cryptodev.h>
#include <rte_ether.h>
#include <rte_ip.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

static void
cperf_set_ops_asym_modex(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
		void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused,
		uint32_t *imix_idx __rte_unused,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_asym_op *asym_op = ops[i]->asym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		asym_op->modex.base.data = options->modex_data->base.data;
		asym_op->modex.base.length = options->modex_data->base.len;
		asym_op->modex.result.data = options->modex_data->result.data;
		asym_op->modex.result.length = options->modex_data->result.len;
		rte_crypto_op_attach_asym_session(ops[i], sess);
	}
}

static void
cperf_set_ops_asym_ecdsa(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
		void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused,
		uint32_t *imix_idx __rte_unused,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_asym_op *asym_op = ops[i]->asym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_asym_session(ops[i], sess);

		asym_op->ecdsa.op_type = options->asym_op_type;
		asym_op->ecdsa.message.data = options->secp256r1_data->message.data;
		asym_op->ecdsa.message.length = options->secp256r1_data->message.length;

		asym_op->ecdsa.k.data = options->secp256r1_data->k.data;
		asym_op->ecdsa.k.length = options->secp256r1_data->k.length;

		asym_op->ecdsa.r.data = options->secp256r1_data->sign_r.data;
		asym_op->ecdsa.r.length = options->secp256r1_data->sign_r.length;
		asym_op->ecdsa.s.data = options->secp256r1_data->sign_s.data;
		asym_op->ecdsa.s.length = options->secp256r1_data->sign_s.length;
	}
}

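/*
 * SM2 ops carry the message/cipher buffers as well as the signature (r, s)
 * components, so a single populate routine serves sign, verify, encrypt and
 * decrypt runs; the operation is selected via options->asym_op_type.
 */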
static void
cperf_set_ops_asym_sm2(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused, uint16_t nb_ops,
		void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused,
		uint32_t *imix_idx __rte_unused,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_asym_op *asym_op = ops[i]->asym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_asym_session(ops[i], sess);

		/* Populate op with operational details */
		asym_op->sm2.hash = options->asym_hash_alg;

		asym_op->sm2.op_type = options->asym_op_type;
		asym_op->sm2.message.data = options->sm2_data->message.data;
		asym_op->sm2.message.length = options->sm2_data->message.length;
		asym_op->sm2.cipher.data = options->sm2_data->cipher.data;
		asym_op->sm2.cipher.length = options->sm2_data->cipher.length;
		asym_op->sm2.id.data = options->sm2_data->id.data;
		asym_op->sm2.id.length = options->sm2_data->id.length;

		asym_op->sm2.k.data = options->sm2_data->k.data;
		asym_op->sm2.k.length = options->sm2_data->k.length;

		asym_op->sm2.r.data = options->sm2_data->sign_r.data;
		asym_op->sm2.r.length = options->sm2_data->sign_r.length;
		asym_op->sm2.s.data = options->sm2_data->sign_s.data;
		asym_op->sm2.s.length = options->sm2_data->sign_s.length;
	}
}

#ifdef RTE_LIB_SECURITY
static void
test_ipsec_vec_populate(struct rte_mbuf *m, const struct cperf_options *options,
			const struct cperf_test_vector *test_vector)
{
	struct rte_ipv4_hdr *ip = rte_pktmbuf_mtod(m, struct rte_ipv4_hdr *);

	if (options->is_outbound) {
		memcpy(ip, test_vector->plaintext.data,
		       sizeof(struct rte_ipv4_hdr));
		ip->total_length = rte_cpu_to_be_16(m->pkt_len);
	}
}

static void
cperf_set_ops_security(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		uint32_t buf_sz;

		uint32_t *per_pkt_hfn = rte_crypto_op_ctod_offset(ops[i],
					uint32_t *, iv_offset);
		*per_pkt_hfn = options->pdcp_ses_hfn_en ? 0 : PDCP_DEFAULT_HFN;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_security_attach_session(ops[i], sess);
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		if (options->op_type == CPERF_PDCP) {
			sym_op->m_src->buf_len = options->segment_sz;
			sym_op->m_src->data_len = options->test_buffer_size;
			sym_op->m_src->pkt_len = sym_op->m_src->data_len;
		}

		if (options->op_type == CPERF_DOCSIS) {
			if (options->imix_distribution_count) {
				buf_sz = options->imix_buffer_sizes[*imix_idx];
				*imix_idx = (*imix_idx + 1) % options->pool_sz;
			} else
				buf_sz = options->test_buffer_size;

			sym_op->m_src->buf_len = options->segment_sz;
			sym_op->m_src->data_len = buf_sz;
			sym_op->m_src->pkt_len = buf_sz;

			/* DOCSIS header is not CRC'ed */
			sym_op->auth.data.offset = options->docsis_hdr_sz;
			sym_op->auth.data.length = buf_sz -
				sym_op->auth.data.offset - RTE_ETHER_CRC_LEN;
			/*
			 * DOCSIS header and SRC and DST MAC addresses are not
			 * ciphered
			 */
			sym_op->cipher.data.offset = sym_op->auth.data.offset +
				RTE_ETHER_HDR_LEN - RTE_ETHER_TYPE_LEN;
			sym_op->cipher.data.length = buf_sz -
				sym_op->cipher.data.offset;
		}

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);
	}

	RTE_SET_USED(tsc_start);
	RTE_SET_USED(test_vector);
}

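/*
 * For IPsec, the source buffer is trimmed to test_buffer_size by walking the
 * segment chain. The time spent rewriting the IP header into each packet is
 * measured separately and added to *tsc_start so that vector population does
 * not skew the measured cycle counts.
 */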
static void
cperf_set_ops_security_ipsec(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start)
{
	const uint32_t test_buffer_size = options->test_buffer_size;
	uint64_t tsc_start_temp, tsc_end_temp;
	uint16_t i = 0;

	RTE_SET_USED(imix_idx);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		struct rte_mbuf *m = sym_op->m_src;
		uint32_t offset = test_buffer_size;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_security_attach_session(ops[i], sess);
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);
		sym_op->m_src->pkt_len = test_buffer_size;

		while ((m->next != NULL) && (offset >= m->data_len)) {
			offset -= m->data_len;
			m = m->next;
		}
		m->data_len = offset;
		/*
		 * If there is not enough room in segment,
		 * place the digest in the next segment
		 */
		if (rte_pktmbuf_tailroom(m) < options->digest_sz) {
			m = m->next;
			offset = 0;
		}
		m->next = NULL;

		sym_op->m_dst = NULL;
	}

	if (options->test_file != NULL)
		return;

	tsc_start_temp = rte_rdtsc_precise();

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		struct rte_mbuf *m = sym_op->m_src;

		test_ipsec_vec_populate(m, options, test_vector);
	}

	tsc_end_temp = rte_rdtsc_precise();
	*tsc_start += tsc_end_temp - tsc_start_temp;
}

static void
cperf_set_ops_security_tls(struct rte_crypto_op **ops,
		uint32_t src_buf_offset __rte_unused,
		uint32_t dst_buf_offset __rte_unused,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start)
{
	const uint32_t test_buffer_size = options->test_buffer_size;
	uint16_t i = 0;

	RTE_SET_USED(imix_idx);
	RTE_SET_USED(tsc_start);
	RTE_SET_USED(test_vector);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;
		struct rte_mbuf *m = sym_op->m_src;
		uint32_t offset = test_buffer_size;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		/* Application Data (0x17) TLS record content type */
		ops[i]->param1.tls_record.content_type = 0x17;
		rte_security_attach_session(ops[i], sess);
		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);
		sym_op->m_src->pkt_len = test_buffer_size;

		while ((m->next != NULL) && (offset >= m->data_len)) {
			offset -= m->data_len;
			m = m->next;
		}
		m->data_len = offset;
		/*
		 * If there is not enough room in segment,
		 * place the digest in the next segment
		 */
		if (rte_pktmbuf_tailroom(m) < options->digest_sz) {
			m = m->next;
			m->data_len = 0;
		}
		m->next = NULL;

		sym_op->m_dst = NULL;
	}
}
#endif

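/*
 * NULL-algorithm populate routines: only the source/destination mbufs and the
 * data length/offset are set; no IV or digest handling is required.
 */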
static void
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}
}

static void
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* auth parameters */
		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}
}

static void
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			/* SNOW3G/KASUMI/ZUC take the length in bits */
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}
}

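/*
 * For digest generation the digest is written directly after the payload:
 * the segment chain is walked to the end of the data and, if the tail
 * segment cannot hold the digest, it is placed at the start of the next
 * segment. For verification the reference digest from the test vector is
 * used instead.
 */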
static void
cperf_set_ops_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			/* SNOW3G/KASUMI/ZUC take the length in bits */
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
}

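/*
 * Chained cipher + auth: the cipher and auth regions are set up
 * independently. For AUTH_THEN_CIPHER with digest generation, the cipher
 * length is extended by digest_sz so the generated digest gets ciphered too.
 */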
static void
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* cipher parameters */
		if (options->imix_distribution_count) {
			sym_op->cipher.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->cipher.data.length = options->test_buffer_size;

		if ((options->auth_op == RTE_CRYPTO_AUTH_OP_GENERATE) &&
				(options->op_type == CPERF_AUTH_THEN_CIPHER))
			sym_op->cipher.data.length += options->digest_sz;

		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length <<= 3;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}

		if (options->imix_distribution_count) {
			sym_op->auth.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->auth.data.length = options->test_buffer_size;

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length <<= 3;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy the auth IV right after the cipher IV;
				 * both live after the crypto operation
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}
}

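/*
 * AEAD: the AAD lives in the op private area right after the IV, with the IV
 * length rounded up to a 16-byte boundary. AES-CCM has special layout
 * requirements: the nonce starts one byte into the IV field and the AAD
 * starts 18 bytes into the AAD field.
 */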
static void
cperf_set_ops_aead(struct rte_crypto_op **ops,
		uint32_t src_buf_offset, uint32_t dst_buf_offset,
		uint16_t nb_ops, void *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset, uint32_t *imix_idx,
		uint64_t *tsc_start __rte_unused)
{
	uint16_t i;
	/* AAD is placed after the IV */
	uint16_t aad_offset = iv_offset +
			RTE_ALIGN_CEIL(test_vector->aead_iv.length, 16);

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		ops[i]->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = (struct rte_mbuf *)((uint8_t *)ops[i] +
							src_buf_offset);

		/* Set dest mbuf to NULL if in-place (dst_buf_offset == 0) */
		if (dst_buf_offset == 0)
			sym_op->m_dst = NULL;
		else
			sym_op->m_dst = (struct rte_mbuf *)((uint8_t *)ops[i] +
							dst_buf_offset);

		/* AEAD parameters */
		if (options->imix_distribution_count) {
			sym_op->aead.data.length =
				options->imix_buffer_sizes[*imix_idx];
			*imix_idx = (*imix_idx + 1) % options->pool_sz;
		} else
			sym_op->aead.data.length = options->test_buffer_size;
		sym_op->aead.data.offset = 0;

		sym_op->aead.aad.data = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, aad_offset);
		sym_op->aead.aad.phys_addr = rte_crypto_op_ctophys_offset(ops[i],
					aad_offset);

		if (options->aead_op == RTE_CRYPTO_AEAD_OP_DECRYPT) {
			sym_op->aead.digest.data = test_vector->digest.data;
			sym_op->aead.digest.phys_addr =
					test_vector->digest.phys_addr;
		} else {
			uint32_t offset = sym_op->aead.data.length +
					sym_op->aead.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = sym_op->m_dst;
			} else {
				tbuf = sym_op->m_src;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/*
				 * If there is not enough room in segment,
				 * place the digest in the next segment
				 */
				if ((tbuf->data_len - offset) < options->digest_sz) {
					tbuf = tbuf->next;
					offset = 0;
				}
				buf = tbuf;
			}

			sym_op->aead.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->aead.digest.phys_addr =
					rte_pktmbuf_iova_offset(buf, offset);
		}
	}

	if ((options->test == CPERF_TEST_TYPE_VERIFY) ||
			(options->test == CPERF_TEST_TYPE_LATENCY)) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			/*
			 * If doing AES-CCM, nonce is copied one byte
			 * after the start of IV field, and AAD is copied
			 * 18 bytes after the start of the AAD field.
			 */
			if (options->aead_algo == RTE_CRYPTO_AEAD_AES_CCM) {
				memcpy(iv_ptr + 1, test_vector->aead_iv.data,
						test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data + 18,
						test_vector->aad.data,
						test_vector->aad.length);
			} else {
				memcpy(iv_ptr, test_vector->aead_iv.data,
						test_vector->aead_iv.length);

				memcpy(ops[i]->sym->aead.aad.data,
						test_vector->aad.data,
						test_vector->aad.length);
			}
		}
	}
}

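/*
 * Build a lookaside-protocol IPsec (ESP, tunnel mode) security session.
 * Either a single AEAD transform or a cipher+auth chain is used; for egress
 * the chain is cipher-then-auth, for ingress auth-then-cipher.
 */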
static void *
create_ipsec_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform auth_xform = {0};
	struct rte_crypto_sym_xform *crypto_xform;
	struct rte_crypto_sym_xform xform = {0};

	if (options->aead_algo != 0) {
		/* Setup AEAD Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		xform.next = NULL;
		xform.aead.algo = options->aead_algo;
		xform.aead.op = options->aead_op;
		xform.aead.iv.offset = iv_offset;
		xform.aead.key.data = test_vector->aead_key.data;
		xform.aead.key.length = test_vector->aead_key.length;
		xform.aead.iv.length = test_vector->aead_iv.length;
		xform.aead.digest_length = options->digest_sz;
		xform.aead.aad_length = options->aead_aad_sz;
		crypto_xform = &xform;
	} else if (options->cipher_algo != 0 && options->auth_algo != 0) {
		/* Setup Cipher Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		xform.cipher.algo = options->cipher_algo;
		xform.cipher.op = options->cipher_op;
		xform.cipher.iv.offset = iv_offset;
		xform.cipher.iv.length = test_vector->cipher_iv.length;
		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			xform.cipher.key.data = test_vector->cipher_key.data;
			xform.cipher.key.length =
				test_vector->cipher_key.length;
		} else {
			xform.cipher.key.data = NULL;
			xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset +
				xform.cipher.iv.length;
		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.key.length =
				test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		if (options->is_outbound) {
			crypto_xform = &xform;
			xform.next = &auth_xform;
			auth_xform.next = NULL;
		} else {
			crypto_xform = &auth_xform;
			auth_xform.next = &xform;
			xform.next = NULL;
		}
	} else {
		return NULL;
	}

#define CPERF_IPSEC_SRC_IP	0x01010101
#define CPERF_IPSEC_DST_IP	0x02020202
#define CPERF_IPSEC_SALT	0x0
#define CPERF_IPSEC_DEFTTL	64
	struct rte_security_ipsec_tunnel_param tunnel = {
		.type = RTE_SECURITY_IPSEC_TUNNEL_IPV4,
		{.ipv4 = {
			.src_ip = { .s_addr = CPERF_IPSEC_SRC_IP},
			.dst_ip = { .s_addr = CPERF_IPSEC_DST_IP},
			.dscp = 0,
			.df = 0,
			.ttl = CPERF_IPSEC_DEFTTL,
		} },
	};
	struct rte_security_session_conf sess_conf = {
		.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
		.protocol = RTE_SECURITY_PROTOCOL_IPSEC,
		{.ipsec = {
			.spi = rte_lcore_id() + 1,
			/**< For testing's sake, the lcore_id is used as the
			 * SPI so that each core creates a different session.
			 */
			.salt = CPERF_IPSEC_SALT,
			.options = { 0 },
			.replay_win_sz = 0,
			.proto = RTE_SECURITY_IPSEC_SA_PROTO_ESP,
			.mode = RTE_SECURITY_IPSEC_SA_MODE_TUNNEL,
			.tunnel = tunnel,
		} },
		.userdata = NULL,
		.crypto_xform = crypto_xform,
	};

	if (options->is_outbound)
		sess_conf.ipsec.direction = RTE_SECURITY_IPSEC_SA_DIR_EGRESS;
	else
		sess_conf.ipsec.direction = RTE_SECURITY_IPSEC_SA_DIR_INGRESS;

	void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

	/* Create security session */
	return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
}

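/*
 * Build a lookaside-protocol TLS record security session. Mirrors the IPsec
 * helper above, but configures RTE_SECURITY_PROTOCOL_TLS_RECORD and selects
 * a WRITE (outbound) or READ (inbound) session type.
 */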
static void *
create_tls_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform auth_xform = {0};
	struct rte_crypto_sym_xform *crypto_xform;
	struct rte_crypto_sym_xform xform = {0};

	if (options->aead_algo != 0) {
		/* Setup AEAD Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		xform.next = NULL;
		xform.aead.algo = options->aead_algo;
		xform.aead.op = options->aead_op;
		xform.aead.iv.offset = iv_offset;
		xform.aead.key.data = test_vector->aead_key.data;
		xform.aead.key.length = test_vector->aead_key.length;
		xform.aead.iv.length = test_vector->aead_iv.length;
		xform.aead.digest_length = options->digest_sz;
		xform.aead.aad_length = options->aead_aad_sz;
		crypto_xform = &xform;
	} else if (options->cipher_algo != 0 && options->auth_algo != 0) {
		/* Setup Cipher Parameters */
		xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		xform.cipher.algo = options->cipher_algo;
		xform.cipher.op = options->cipher_op;
		xform.cipher.iv.offset = iv_offset;
		xform.cipher.iv.length = test_vector->cipher_iv.length;
		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			xform.cipher.key.data = test_vector->cipher_key.data;
			xform.cipher.key.length = test_vector->cipher_key.length;
		} else {
			xform.cipher.key.data = NULL;
			xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset + xform.cipher.iv.length;
		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.key.length = test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		if (options->is_outbound) {
			/*
			 * Only the AUTH-then-encrypt mode is currently
			 * supported for TLS.
			 */
			crypto_xform = &auth_xform;
			auth_xform.next = &xform;
			xform.next = NULL;
		} else {
			crypto_xform = &xform;
			xform.next = &auth_xform;
			auth_xform.next = NULL;
		}
	} else {
		return NULL;
	}

	struct rte_security_tls_record_sess_options opts = {
		.iv_gen_disable = 0,
		.extra_padding_enable = 0,
	};
	struct rte_security_session_conf sess_conf = {
		.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
		.protocol = RTE_SECURITY_PROTOCOL_TLS_RECORD,
		{.tls_record = {
			.ver = RTE_SECURITY_VERSION_TLS_1_2,
			.options = opts,
		} },
		.userdata = NULL,
		.crypto_xform = crypto_xform,
	};
	if (options->tls_version)
		sess_conf.tls_record.ver = options->tls_version;

	if (options->is_outbound)
		sess_conf.tls_record.type = RTE_SECURITY_TLS_SESS_TYPE_WRITE;
	else
		sess_conf.tls_record.type = RTE_SECURITY_TLS_SESS_TYPE_READ;

	void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

	/* Create security session */
	return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
}

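/*
 * Session factory: creates an asymmetric session for MODEX/ECDSA/SM2, a
 * security session for PDCP/IPsec/TLS/DOCSIS (when RTE_LIB_SECURITY is
 * enabled), and a plain cryptodev symmetric session otherwise.
 */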
static void *
cperf_create_session(struct rte_mempool *sess_mp,
		uint8_t dev_id,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_crypto_sym_xform aead_xform;
	void *sess = NULL;
	void *asym_sess = NULL;
	struct rte_crypto_asym_xform xform = {0};
	int ret;

	if (options->op_type == CPERF_ASYM_MODEX) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
		xform.modex.modulus.data = options->modex_data->modulus.data;
		xform.modex.modulus.length = options->modex_data->modulus.len;
		xform.modex.exponent.data = options->modex_data->exponent.data;
		xform.modex.exponent.length = options->modex_data->exponent.len;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "Asym session create failed\n");
			return NULL;
		}
		return asym_sess;
	}

	if (options->op_type == CPERF_ASYM_SECP256R1) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_ECDSA;
		xform.ec.curve_id = options->secp256r1_data->curve;
		xform.ec.pkey.data = options->secp256r1_data->pkey.data;
		xform.ec.pkey.length = options->secp256r1_data->pkey.length;
		xform.ec.q.x.data = options->secp256r1_data->pubkey_qx.data;
		xform.ec.q.x.length = options->secp256r1_data->pubkey_qx.length;
		xform.ec.q.y.data = options->secp256r1_data->pubkey_qy.data;
		xform.ec.q.y.length = options->secp256r1_data->pubkey_qy.length;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "ECDSA Asym session create failed\n");
			return NULL;
		}

		return asym_sess;
	}

	if (options->op_type == CPERF_ASYM_SM2) {
		xform.next = NULL;
		xform.xform_type = RTE_CRYPTO_ASYM_XFORM_SM2;
		xform.ec.curve_id = options->sm2_data->curve;
		xform.ec.pkey.data = options->sm2_data->pkey.data;
		xform.ec.pkey.length = options->sm2_data->pkey.length;
		xform.ec.q.x.data = options->sm2_data->pubkey_qx.data;
		xform.ec.q.x.length = options->sm2_data->pubkey_qx.length;
		xform.ec.q.y.data = options->sm2_data->pubkey_qy.data;
		xform.ec.q.y.length = options->sm2_data->pubkey_qy.length;

		ret = rte_cryptodev_asym_session_create(dev_id, &xform,
				sess_mp, &asym_sess);
		if (ret < 0) {
			RTE_LOG(ERR, USER1, "SM2 Asym session create failed\n");
			return NULL;
		}

		return asym_sess;
	}
#ifdef RTE_LIB_SECURITY
	/*
	 * security only
	 */
	if (options->op_type == CPERF_PDCP) {
		/* Setup Cipher Parameters */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		cipher_xform.cipher.iv.length = 4;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data = test_vector->cipher_key.data;
			cipher_xform.cipher.key.length = test_vector->cipher_key.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
		}

		/* Setup Auth Parameters */
		if (options->auth_algo != 0) {
			auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
			auth_xform.next = NULL;
			auth_xform.auth.algo = options->auth_algo;
			auth_xform.auth.op = options->auth_op;
			auth_xform.auth.iv.offset = iv_offset +
				cipher_xform.cipher.iv.length;

			/* auth different than null */
			if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
				auth_xform.auth.digest_length = options->digest_sz;
				auth_xform.auth.key.length = test_vector->auth_key.length;
				auth_xform.auth.key.data = test_vector->auth_key.data;
				auth_xform.auth.iv.length = test_vector->auth_iv.length;
			} else {
				auth_xform.auth.digest_length = 0;
				auth_xform.auth.key.length = 0;
				auth_xform.auth.key.data = NULL;
				auth_xform.auth.iv.length = 0;
			}

			cipher_xform.next = &auth_xform;
		} else {
			cipher_xform.next = NULL;
		}

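		/*
		 * When session-based HFN is disabled, hfn_ovrd is set and the
		 * per-packet HFN written by cperf_set_ops_security() at
		 * iv_offset is used instead.
		 */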
		struct rte_security_session_conf sess_conf = {
			.action_type = RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_PDCP,
			{.pdcp = {
				.bearer = 0x16,
				.domain = options->pdcp_domain,
				.pkt_dir = 0,
				.sn_size = options->pdcp_sn_sz,
				.hfn = options->pdcp_ses_hfn_en ?
					PDCP_DEFAULT_HFN : 0,
				.hfn_threshold = 0x70C0A,
				.sdap_enabled = options->pdcp_sdap,
				.hfn_ovrd = !(options->pdcp_ses_hfn_en),
			} },
			.crypto_xform = &cipher_xform
		};

		void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
	}

	if (options->op_type == CPERF_IPSEC) {
		return create_ipsec_session(sess_mp, dev_id,
				options, test_vector, iv_offset);
	}

	if (options->op_type == CPERF_TLS) {
		return create_tls_session(sess_mp, dev_id,
				options, test_vector, iv_offset);
	}

	if (options->op_type == CPERF_DOCSIS) {
		enum rte_security_docsis_direction direction;

		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
				test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
				test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
				test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		cipher_xform.next = NULL;

		if (options->cipher_op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
			direction = RTE_SECURITY_DOCSIS_DOWNLINK;
		else
			direction = RTE_SECURITY_DOCSIS_UPLINK;

		struct rte_security_session_conf sess_conf = {
			.action_type =
				RTE_SECURITY_ACTION_TYPE_LOOKASIDE_PROTOCOL,
			.protocol = RTE_SECURITY_PROTOCOL_DOCSIS,
			{.docsis = {
				.direction = direction,
			} },
			.crypto_xform = &cipher_xform
		};
		void *ctx = rte_cryptodev_get_sec_ctx(dev_id);

		/* Create security session */
		return (void *)rte_security_session_create(ctx, &sess_conf, sess_mp);
	}
#endif
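	/*
	 * The remaining op types build plain cryptodev symmetric sessions
	 * (cipher only, auth only, cipher+auth chain, AEAD).
	 */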
	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform,
				sess_mp);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->digest_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform,
				sess_mp);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER) {
		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher different than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;
		auth_xform.auth.iv.offset = iv_offset +
				cipher_xform.cipher.iv.length;

		/* auth different than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->digest_sz;
			auth_xform.auth.iv.length = test_vector->auth_iv.length;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data =
					test_vector->auth_key.data;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* cipher then auth */
		if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
			cipher_xform.next = &auth_xform;
			/* create crypto session */
			sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform, sess_mp);
		} else { /* auth then cipher */
			auth_xform.next = &cipher_xform;
			/* create crypto session */
			sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform, sess_mp);
		}
	} else { /* options->op_type == CPERF_AEAD */
		aead_xform.type = RTE_CRYPTO_SYM_XFORM_AEAD;
		aead_xform.next = NULL;
		aead_xform.aead.algo = options->aead_algo;
		aead_xform.aead.op = options->aead_op;
		aead_xform.aead.iv.offset = iv_offset;

		aead_xform.aead.key.data = test_vector->aead_key.data;
		aead_xform.aead.key.length = test_vector->aead_key.length;
		aead_xform.aead.iv.length = test_vector->aead_iv.length;

		aead_xform.aead.digest_length = options->digest_sz;
		aead_xform.aead.aad_length = options->aead_aad_sz;

		/* Create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &aead_xform,
				sess_mp);
	}

	return sess;
}

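/*
 * Resolve the session-create and op-populate callbacks for the configured
 * op type. Returns 0 on success, -1 for an unknown op type.
 */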
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	switch (options->op_type) {
	case CPERF_AEAD:
		op_fns->populate_ops = cperf_set_ops_aead;
		break;

	case CPERF_AUTH_THEN_CIPHER:
	case CPERF_CIPHER_THEN_AUTH:
		op_fns->populate_ops = cperf_set_ops_cipher_auth;
		break;
	case CPERF_AUTH_ONLY:
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		break;
	case CPERF_CIPHER_ONLY:
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		break;
	case CPERF_ASYM_MODEX:
		op_fns->populate_ops = cperf_set_ops_asym_modex;
		break;
	case CPERF_ASYM_SECP256R1:
		op_fns->populate_ops = cperf_set_ops_asym_ecdsa;
		break;
	case CPERF_ASYM_SM2:
		op_fns->populate_ops = cperf_set_ops_asym_sm2;
		break;
#ifdef RTE_LIB_SECURITY
	case CPERF_PDCP:
	case CPERF_DOCSIS:
		op_fns->populate_ops = cperf_set_ops_security;
		break;
	case CPERF_IPSEC:
		op_fns->populate_ops = cperf_set_ops_security_ipsec;
		break;
	case CPERF_TLS:
		op_fns->populate_ops = cperf_set_ops_security_tls;
		break;
#endif
	default:
		return -1;
	}

	return 0;
}
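
/*
 * Usage sketch (illustrative only; the actual callers are the cperf test
 * runners, e.g. cperf_test_throughput.c): resolve the callbacks once, create
 * a session, then populate each burst of ops before enqueueing it:
 *
 *	struct cperf_op_fns fns;
 *	void *sess;
 *
 *	if (cperf_get_op_functions(options, &fns) != 0)
 *		return -1;
 *	sess = fns.sess_create(sess_mp, dev_id, options, test_vector,
 *			iv_offset);
 *	fns.populate_ops(ops, src_buf_offset, dst_buf_offset, burst_size,
 *			sess, options, test_vector, iv_offset, &imix_idx,
 *			&tsc_start);
 */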