/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
 */

#include <string.h>

#include <rte_common.h>
#include <rte_cryptodev_pmd.h>
#include <rte_malloc.h>

#include "ccp_pmd_private.h"
#include "ccp_dev.h"
#include "ccp_crypto.h"

#define CCP_BASE_SYM_CRYPTO_CAPABILITIES \
	{ /* SHA1 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA1, \
				.block_size = 64, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 20, \
					.max = 20, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA1 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC, \
				.block_size = 64, \
				.key_size = { \
					.min = 1, \
					.max = 64, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 20, \
					.max = 20, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA224 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA224, \
				.block_size = 64, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 28, \
					.max = 28, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA224 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC, \
				.block_size = 64, \
				.key_size = { \
					.min = 1, \
					.max = 64, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 28, \
					.max = 28, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-224 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_224, \
				.block_size = 144, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 28, \
					.max = 28, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-224 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_224_HMAC, \
				.block_size = 144, \
				.key_size = { \
					.min = 1, \
					.max = 144, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 28, \
					.max = 28, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA256 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA256, \
				.block_size = 64, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 32, \
					.max = 32, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA256 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC, \
				.block_size = 64, \
				.key_size = { \
					.min = 1, \
					.max = 64, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 32, \
					.max = 32, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-256 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_256, \
				.block_size = 136, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 32, \
					.max = 32, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-256-HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_256_HMAC, \
				.block_size = 136, \
				.key_size = { \
					.min = 1, \
					.max = 136, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 32, \
					.max = 32, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA384 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA384, \
				.block_size = 128, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 48, \
					.max = 48, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA384 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC, \
				.block_size = 128, \
				.key_size = { \
					.min = 1, \
					.max = 128, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 48, \
					.max = 48, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-384 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_384, \
				.block_size = 104, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 48, \
					.max = 48, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-384-HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_384_HMAC, \
				.block_size = 104, \
				.key_size = { \
					.min = 1, \
					.max = 104, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 48, \
					.max = 48, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA512 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA512, \
				.block_size = 128, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 64, \
					.max = 64, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA512 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC, \
				.block_size = 128, \
				.key_size = { \
					.min = 1, \
					.max = 128, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 64, \
					.max = 64, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-512 */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_512, \
				.block_size = 72, \
				.key_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				}, \
				.digest_size = { \
					.min = 64, \
					.max = 64, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* SHA3-512-HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_SHA3_512_HMAC, \
				.block_size = 72, \
				.key_size = { \
					.min = 1, \
					.max = 72, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 64, \
					.max = 64, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}, \
	{ /* AES-CMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_AES_CMAC, \
				.block_size = 16, \
				.key_size = { \
					.min = 16, \
					.max = 32, \
					.increment = 8 \
				}, \
				.digest_size = { \
					.min = 16, \
					.max = 16, \
					.increment = 0 \
				}, \
			}, } \
		}, } \
	}, \
	{ /* AES ECB */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			{.cipher = { \
				.algo = RTE_CRYPTO_CIPHER_AES_ECB, \
				.block_size = 16, \
				.key_size = { \
					.min = 16, \
					.max = 32, \
					.increment = 8 \
				}, \
				.iv_size = { \
					.min = 0, \
					.max = 0, \
					.increment = 0 \
				} \
			}, } \
		}, } \
	}, \
	{ /* AES CBC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			{.cipher = { \
				.algo = RTE_CRYPTO_CIPHER_AES_CBC, \
				.block_size = 16, \
				.key_size = { \
					.min = 16, \
					.max = 32, \
					.increment = 8 \
				}, \
				.iv_size = { \
					.min = 16, \
					.max = 16, \
					.increment = 0 \
				} \
			}, } \
		}, } \
	}, \
	{ /* AES CTR */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			{.cipher = { \
				.algo = RTE_CRYPTO_CIPHER_AES_CTR, \
				.block_size = 16, \
				.key_size = { \
					.min = 16, \
					.max = 32, \
					.increment = 8 \
				}, \
				.iv_size = { \
					.min = 16, \
					.max = 16, \
					.increment = 0 \
				} \
			}, } \
		}, } \
	}, \
	{ /* 3DES CBC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER, \
			{.cipher = { \
				.algo = RTE_CRYPTO_CIPHER_3DES_CBC, \
				.block_size = 8, \
				.key_size = { \
					.min = 16, \
					.max = 24, \
					.increment = 8 \
				}, \
				.iv_size = { \
					.min = 8, \
					.max = 8, \
					.increment = 0 \
				} \
			}, } \
		}, } \
	}, \
	{ /* AES GCM */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AEAD, \
			{.aead = { \
				.algo = RTE_CRYPTO_AEAD_AES_GCM, \
				.block_size = 16, \
				.key_size = { \
					.min = 16, \
					.max = 32, \
					.increment = 8 \
				}, \
				.digest_size = { \
					.min = 16, \
					.max = 16, \
					.increment = 0 \
				}, \
				.aad_size = { \
					.min = 0, \
					.max = 65535, \
					.increment = 1 \
				}, \
				.iv_size = { \
					.min = 12, \
					.max = 16, \
					.increment = 4 \
				}, \
			}, } \
		}, } \
	}
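
/*
 * Additional capabilities that are only advertised when the PMD is
 * probed with the authentication option (auth_opt) enabled; see
 * ccp_pmd_info_get(), which then reports ccp_crypto_cap_complete
 * instead of ccp_crypto_cap.
 */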

#define CCP_EXTRA_SYM_CRYPTO_CAPABILITIES \
	{ /* MD5 HMAC */ \
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC, \
		{.sym = { \
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH, \
			{.auth = { \
				.algo = RTE_CRYPTO_AUTH_MD5_HMAC, \
				.block_size = 64, \
				.key_size = { \
					.min = 1, \
					.max = 64, \
					.increment = 1 \
				}, \
				.digest_size = { \
					.min = 16, \
					.max = 16, \
					.increment = 0 \
				}, \
				.aad_size = { 0 } \
			}, } \
		}, } \
	}

static const struct rte_cryptodev_capabilities ccp_crypto_cap[] = {
	CCP_BASE_SYM_CRYPTO_CAPABILITIES,
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};

static const struct rte_cryptodev_capabilities ccp_crypto_cap_complete[] = {
	CCP_EXTRA_SYM_CRYPTO_CAPABILITIES,
	CCP_BASE_SYM_CRYPTO_CAPABILITIES,
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};

static int
ccp_pmd_config(struct rte_cryptodev *dev __rte_unused,
	       struct rte_cryptodev_config *config __rte_unused)
{
	return 0;
}

static int
ccp_pmd_start(struct rte_cryptodev *dev)
{
	return ccp_dev_start(dev);
}

static void
ccp_pmd_stop(struct rte_cryptodev *dev __rte_unused)
{

}

static int
ccp_pmd_close(struct rte_cryptodev *dev __rte_unused)
{
	return 0;
}

static void
ccp_pmd_stats_get(struct rte_cryptodev *dev,
		  struct rte_cryptodev_stats *stats)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct ccp_qp *qp = dev->data->queue_pairs[qp_id];

		stats->enqueued_count += qp->qp_stats.enqueued_count;
		stats->dequeued_count += qp->qp_stats.dequeued_count;

		stats->enqueue_err_count += qp->qp_stats.enqueue_err_count;
		stats->dequeue_err_count += qp->qp_stats.dequeue_err_count;
	}
}

static void
ccp_pmd_stats_reset(struct rte_cryptodev *dev)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct ccp_qp *qp = dev->data->queue_pairs[qp_id];

		memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
	}
}

static void
ccp_pmd_info_get(struct rte_cryptodev *dev,
		 struct rte_cryptodev_info *dev_info)
{
	struct ccp_private *internals = dev->data->dev_private;

	if (dev_info != NULL) {
		dev_info->driver_id = dev->driver_id;
		dev_info->feature_flags = dev->feature_flags;
		dev_info->capabilities = ccp_crypto_cap;
		if (internals->auth_opt == 1)
			dev_info->capabilities = ccp_crypto_cap_complete;
		dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
		/* No limit on the number of sessions */
		dev_info->sym.max_nb_sessions = 0;
	}
}

static int
ccp_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
{
	struct ccp_qp *qp;

	if (dev->data->queue_pairs[qp_id] != NULL) {
		qp = (struct ccp_qp *)dev->data->queue_pairs[qp_id];
		rte_ring_free(qp->processed_pkts);
		rte_mempool_free(qp->batch_mp);
		rte_free(qp);
		dev->data->queue_pairs[qp_id] = NULL;
	}
	return 0;
}

static int
ccp_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
			   struct ccp_qp *qp)
{
	unsigned int n = snprintf(qp->name, sizeof(qp->name),
				  "ccp_pmd_%u_qp_%u",
				  dev->data->dev_id, qp->id);

	/* snprintf() returns the length the full string would need, so a
	 * value >= sizeof(qp->name) means the name was truncated.
	 */
	if (n >= sizeof(qp->name))
		return -1;

	return 0;
}
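
/*
 * Look up the batch-info ring by the queue pair's unique name and reuse
 * it if it is large enough; otherwise create a new single-producer,
 * single-consumer ring of ring_size entries on the given socket.
 */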

static struct rte_ring *
ccp_pmd_qp_create_batch_info_ring(struct ccp_qp *qp,
				  unsigned int ring_size, int socket_id)
{
	struct rte_ring *r;

	r = rte_ring_lookup(qp->name);
	if (r) {
		if (r->size >= ring_size) {
			CCP_LOG_INFO(
				"Reusing ring %s for processed packets",
				qp->name);
			return r;
		}
		CCP_LOG_INFO(
			"Unable to reuse ring %s for processed packets",
			qp->name);
		return NULL;
	}

	return rte_ring_create(qp->name, ring_size, socket_id,
			       RING_F_SP_ENQ | RING_F_SC_DEQ);
}

static int
ccp_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
		 const struct rte_cryptodev_qp_conf *qp_conf,
		 int socket_id)
{
	struct ccp_private *internals = dev->data->dev_private;
	struct ccp_qp *qp;
	int retval = 0;

	if (qp_id >= internals->max_nb_qpairs) {
		CCP_LOG_ERR("Invalid qp_id %u, should be less than %u",
			    qp_id, internals->max_nb_qpairs);
		return (-EINVAL);
	}

	/* Free memory prior to re-allocation if needed. */
	if (dev->data->queue_pairs[qp_id] != NULL)
		ccp_pmd_qp_release(dev, qp_id);

	/* Allocate the queue pair data structure. */
	qp = rte_zmalloc_socket("CCP Crypto PMD Queue Pair", sizeof(*qp),
				RTE_CACHE_LINE_SIZE, socket_id);
	if (qp == NULL) {
		CCP_LOG_ERR("Failed to allocate queue pair memory");
		return (-ENOMEM);
	}

	qp->dev = dev;
	qp->id = qp_id;
	dev->data->queue_pairs[qp_id] = qp;

	retval = ccp_pmd_qp_set_unique_name(dev, qp);
	if (retval) {
		CCP_LOG_ERR("Failed to create unique name for ccp qp");
		goto qp_setup_cleanup;
	}

	qp->processed_pkts = ccp_pmd_qp_create_batch_info_ring(qp,
			qp_conf->nb_descriptors, socket_id);
	if (qp->processed_pkts == NULL) {
		CCP_LOG_ERR("Failed to create batch info ring");
		goto qp_setup_cleanup;
	}

	qp->sess_mp = qp_conf->mp_session;
	qp->sess_mp_priv = qp_conf->mp_session_private;

	/* mempool for batch info */
	qp->batch_mp = rte_mempool_create(
			qp->name,
			qp_conf->nb_descriptors,
			sizeof(struct ccp_batch_info),
			RTE_CACHE_LINE_SIZE,
			0, NULL, NULL, NULL, NULL,
			SOCKET_ID_ANY, 0);
	if (qp->batch_mp == NULL)
		goto qp_setup_cleanup;
	memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
	return 0;

qp_setup_cleanup:
	dev->data->queue_pairs[qp_id] = NULL;
	if (qp)
		rte_free(qp);
	return -1;
}

static uint32_t
ccp_pmd_qp_count(struct rte_cryptodev *dev)
{
	return dev->data->nb_queue_pairs;
}

static unsigned
ccp_pmd_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
	return sizeof(struct ccp_session);
}

static int
ccp_pmd_sym_session_configure(struct rte_cryptodev *dev,
			      struct rte_crypto_sym_xform *xform,
			      struct rte_cryptodev_sym_session *sess,
			      struct rte_mempool *mempool)
{
	int ret;
	void *sess_private_data;
	struct ccp_private *internals;

	if (unlikely(sess == NULL || xform == NULL)) {
		CCP_LOG_ERR("Invalid session struct or xform");
		return -ENOMEM;
	}

	if (rte_mempool_get(mempool, &sess_private_data)) {
		CCP_LOG_ERR("Couldn't get object from session mempool");
		return -ENOMEM;
	}
	internals = (struct ccp_private *)dev->data->dev_private;
	ret = ccp_set_session_parameters(sess_private_data, xform, internals);
	if (ret != 0) {
		CCP_LOG_ERR("failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return ret;
	}
	set_sym_session_private_data(sess, dev->driver_id,
				     sess_private_data);

	return 0;
}
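
/*
 * Return the driver-private session data to the mempool it was
 * allocated from, clear it, and detach it from the generic session.
 */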

static void
ccp_pmd_sym_session_clear(struct rte_cryptodev *dev,
			  struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_sym_session_private_data(sess, index);

	if (sess_priv) {
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		rte_mempool_put(sess_mp, sess_priv);
		memset(sess_priv, 0, sizeof(struct ccp_session));
		set_sym_session_private_data(sess, index, NULL);
	}
}

struct rte_cryptodev_ops ccp_ops = {
	.dev_configure		= ccp_pmd_config,
	.dev_start		= ccp_pmd_start,
	.dev_stop		= ccp_pmd_stop,
	.dev_close		= ccp_pmd_close,

	.stats_get		= ccp_pmd_stats_get,
	.stats_reset		= ccp_pmd_stats_reset,

	.dev_infos_get		= ccp_pmd_info_get,

	.queue_pair_setup	= ccp_pmd_qp_setup,
	.queue_pair_release	= ccp_pmd_qp_release,
	.queue_pair_count	= ccp_pmd_qp_count,

	.sym_session_get_size	= ccp_pmd_sym_session_get_size,
	.sym_session_configure	= ccp_pmd_sym_session_configure,
	.sym_session_clear	= ccp_pmd_sym_session_clear,
};

struct rte_cryptodev_ops *ccp_pmd_ops = &ccp_ops;