1 /* 2 * BSD LICENSE 3 * 4 * Copyright (C) Cavium networks Ltd. 2017. 5 * 6 * Redistribution and use in source and binary forms, with or without 7 * modification, are permitted provided that the following conditions 8 * are met: 9 * 10 * * Redistributions of source code must retain the above copyright 11 * notice, this list of conditions and the following disclaimer. 12 * * Redistributions in binary form must reproduce the above copyright 13 * notice, this list of conditions and the following disclaimer in 14 * the documentation and/or other materials provided with the 15 * distribution. 16 * * Neither the name of Cavium networks nor the names of its 17 * contributors may be used to endorse or promote products derived 18 * from this software without specific prior written permission. 19 * 20 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
31 */ 32 33 #include <stdbool.h> 34 35 #include <rte_common.h> 36 #include <rte_hexdump.h> 37 #include <rte_cryptodev.h> 38 #include <rte_cryptodev_pmd.h> 39 #include <rte_cryptodev_vdev.h> 40 #include <rte_vdev.h> 41 #include <rte_malloc.h> 42 #include <rte_cpuflags.h> 43 44 #include "armv8_crypto_defs.h" 45 46 #include "rte_armv8_pmd_private.h" 47 48 static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev); 49 50 /** 51 * Pointers to the supported combined mode crypto functions are stored 52 * in the static tables. Each combined (chained) cryptographic operation 53 * can be described by a set of numbers: 54 * - order: order of operations (cipher, auth) or (auth, cipher) 55 * - direction: encryption or decryption 56 * - calg: cipher algorithm such as AES_CBC, AES_CTR, etc. 57 * - aalg: authentication algorithm such as SHA1, SHA256, etc. 58 * - keyl: cipher key length, for example 128, 192, 256 bits 59 * 60 * In order to quickly acquire each function pointer based on those numbers, 61 * a hierarchy of arrays is maintained. The final level, 3D array is indexed 62 * by the combined mode function parameters only (cipher algorithm, 63 * authentication algorithm and key length). 64 * 65 * This gives 3 memory accesses to obtain a function pointer instead of 66 * traversing the array manually and comparing function parameters on each loop. 67 * 68 * +--+CRYPTO_FUNC 69 * +--+ENC| 70 * +--+CA| 71 * | +--+DEC 72 * ORDER| 73 * | +--+ENC 74 * +--+AC| 75 * +--+DEC 76 * 77 */ 78 79 /** 80 * 3D array type for ARM Combined Mode crypto functions pointers. 
 * CRYPTO_CIPHER_MAX: max cipher ID number
 * CRYPTO_AUTH_MAX: max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX: max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl) (ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt: cipher (encrypt), authenticate
 * crypto_op_ca_decrypt: cipher (decrypt), authenticate
 * crypto_op_ac_encrypt: authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt: authenticate, cipher (decrypt)
 *
 * Entries not listed in an initializer below default to NULL; the
 * session setup code treats a NULL entry as "combination unsupported".
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

/* Cipher-decrypt-then-auth is not implemented: every entry is NULL */
static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	NULL
};

/* Auth-then-cipher-encrypt is not implemented: every entry is NULL */
static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth: cipher first, authenticate after
 * crypto_auth_cipher: authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 * Indexed in order by: chain order, cipher direction, cipher algorithm,
 * auth algorithm and key length. May evaluate to NULL for combinations
 * that have no implementation (see tables above).
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
({									\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);			\
})

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule functions pointers.
 * CRYPTO_CIPHER_MAX: max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX: max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt: keys for encryption
 * crypto_key_sched_decrypt: keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract particular key schedule function from the 2D array,
 * indexed by cipher direction, cipher algorithm and key length.
 * May evaluate to NULL for unsupported combinations.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
({									\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	((*ks_tbl)[(calg)][KEYL(keyl)]);				\
})

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/**
 * Get xform chain order.
 *
 * Only exactly-two-element chains (cipher+auth or auth+cipher) are
 * accepted; a single xform or a chain of three or more is rejected.
 *
 * @param xform	head of the symmetric xform chain (caller guarantees
 *		non-NULL; see armv8_crypto_set_session_parameters)
 * @return	chain order enum, or ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED
 */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{

	/*
	 * This driver currently covers only chained operations.
	 * Ignore only cipher or only authentication operations
	 * or chains longer than 2 xform structures.
	 */
	if (xform->next == NULL || xform->next->next != NULL)
		return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
	}

	return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}

/**
 * Build the HMAC i_key_pad/o_key_pad blocks from the session auth key
 * (RFC 2104 style: zero-padded key XORed with the ipad/opad patterns).
 *
 * NOTE(review): this copies xform->auth.key.length bytes out of
 * sess->auth.hmac.key. When the original key was longer than the
 * digest-sized session key, auth_set_prerequisites() stores only the
 * hashed (digest-length) key there, so this looks like an overread of
 * the session key buffer — and, for key lengths beyond the pad size,
 * an overflow of the pads. Confirm against the key-size limits the PMD
 * capabilities advertise.
 */
static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
				const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	/*
	 * XOR key with IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * Byte-by-byte operation may seem to be the less efficient
	 * here but in fact it's the opposite.
	 * The result ASM code is likely operate on NEON registers
	 * (load auth key to Qx, load IPAD/OPAD to multiple
	 * elements of Qy, eor 128 bits at once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}

/**
 * Derive per-session HMAC state (session key plus partially hashed
 * ipad/opad blocks used as the initial state of the final HMAC).
 *
 * @return 0 on success (algorithms not handled by the switch fall
 *	   through to 0 — the caller's algo switch rejects them first),
 *	   -1 if a SHA primitive fails
 */
static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	/* Scratch buffer, large enough for a SHA1 or SHA256 block state */
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA1_AUTH_KEY_LENGTH);

		if (xform->auth.key.length > SHA1_AUTH_KEY_LENGTH) {
			/*
			 * In case the key is longer than 160 bits
			 * the algorithm will use SHA1(key) instead.
			 */
			error = sha1_block(NULL, xform->auth.key.data,
				sess->auth.hmac.key, xform->auth.key.length);
			if (error != 0)
				return -1;
		} else {
			/*
			 * Now copy the given authentication key to the session
			 * key assuming that the session key is zeroed there is
			 * no need for additional zero padding if the key is
			 * shorter than SHA1_AUTH_KEY_LENGTH.
			 */
			rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
							xform->auth.key.length);
		}

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
		    partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
		    partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA256_AUTH_KEY_LENGTH);

		if (xform->auth.key.length > SHA256_AUTH_KEY_LENGTH) {
			/*
			 * In case the key is longer than 256 bits
			 * the algorithm will use SHA256(key) instead.
			 */
			error = sha256_block(NULL, xform->auth.key.data,
				sess->auth.hmac.key, xform->auth.key.length);
			if (error != 0)
				return -1;
		} else {
			/*
			 * Now copy the given authentication key to the session
			 * key assuming that the session key is zeroed there is
			 * no need for additional zero padding if the key is
			 * shorter than SHA256_AUTH_KEY_LENGTH.
			 */
			rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
							xform->auth.key.length);
		}

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
		    partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
		    partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

		break;
	default:
		/*
		 * Other algorithms are rejected by the caller's auth algo
		 * switch before this function is reached.
		 */
		break;
	}

	return 0;
}

/**
 * Expand the cipher key into sess->cipher.key.data using the key
 * schedule selected during session setup (no-op if none was selected).
 * Always returns 0.
 */
static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
	}

	return 0;
}

/**
 * Fill in the session for a chained (cipher+auth) operation: record
 * direction, algorithms and key length, then resolve the combined-mode
 * crypto function and the key schedule from the lookup tables.
 *
 * @return 0 on success, -EINVAL for any unsupported parameter combination
 */
static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate
and prepare scratch order of combined operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -EINVAL;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	cop = sess->cipher.direction;
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv_len = 16;
		break;
	default:
		return -EINVAL;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC:	/* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -EINVAL;
	}

	/* Verify supported key lengths and extract proper algorithm */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -EINVAL;
	}

	if (unlikely(sess->crypto_func == NULL)) {
		/*
		 * If we got here that means that there must be a bug
		 * in the algorithms selection above. Nevertheless keep
		 * it here to catch bug immediately and avoid NULL pointer
		 * dereference in OPs processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	/* Determine which xform is the cipher and which the auth */
	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -EINVAL;
	}

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return -EINVAL;
		}
	} else {
		/*
		 * Unreachable in practice: the default case above already
		 * returned -EINVAL. Kept as a defensive catch-all.
		 */
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -EINVAL;
	}

	return 0;
}

/**
 * Provide session for operation: either the session already attached to
 * the op, or — for session-less ops — a temporary session drawn from the
 * queue pair's mempool and configured from op->sym->xform.
 * On failure sets op->status to INVALID_SESSION and returns NULL.
 */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session
*sess = NULL; 547 548 if (op->sym->sess_type == RTE_CRYPTO_SYM_OP_WITH_SESSION) { 549 /* get existing session */ 550 if (likely(op->sym->session != NULL && 551 op->sym->session->dev_type == 552 RTE_CRYPTODEV_ARMV8_PMD)) { 553 sess = (struct armv8_crypto_session *) 554 op->sym->session->_private; 555 } 556 } else { 557 /* provide internal session */ 558 void *_sess = NULL; 559 560 if (!rte_mempool_get(qp->sess_mp, (void **)&_sess)) { 561 sess = (struct armv8_crypto_session *) 562 ((struct rte_cryptodev_sym_session *)_sess) 563 ->_private; 564 565 if (unlikely(armv8_crypto_set_session_parameters( 566 sess, op->sym->xform) != 0)) { 567 rte_mempool_put(qp->sess_mp, _sess); 568 sess = NULL; 569 } else 570 op->sym->session = _sess; 571 } 572 } 573 574 if (unlikely(sess == NULL)) 575 op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION; 576 577 return sess; 578 } 579 580 /* 581 *------------------------------------------------------------------------------ 582 * Process Operations 583 *------------------------------------------------------------------------------ 584 */ 585 586 /*----------------------------------------------------------------------------*/ 587 588 /** Process cipher operation */ 589 static inline void 590 process_armv8_chained_op 591 (struct rte_crypto_op *op, struct armv8_crypto_session *sess, 592 struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst) 593 { 594 crypto_func_t crypto_func; 595 crypto_arg_t arg; 596 struct rte_mbuf *m_asrc, *m_adst; 597 uint8_t *csrc, *cdst; 598 uint8_t *adst, *asrc; 599 uint64_t clen, alen; 600 int error; 601 602 clen = op->sym->cipher.data.length; 603 alen = op->sym->auth.data.length; 604 605 csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *, 606 op->sym->cipher.data.offset); 607 cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *, 608 op->sym->cipher.data.offset); 609 610 switch (sess->chain_order) { 611 case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH: 612 m_asrc = m_adst = mbuf_dst; 613 break; 614 case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: 
615 m_asrc = mbuf_src; 616 m_adst = mbuf_dst; 617 break; 618 default: 619 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; 620 return; 621 } 622 asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *, 623 op->sym->auth.data.offset); 624 625 switch (sess->auth.mode) { 626 case ARMV8_CRYPTO_AUTH_AS_AUTH: 627 /* Nothing to do here, just verify correct option */ 628 break; 629 case ARMV8_CRYPTO_AUTH_AS_HMAC: 630 arg.digest.hmac.key = sess->auth.hmac.key; 631 arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad; 632 arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad; 633 break; 634 default: 635 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; 636 return; 637 } 638 639 if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) { 640 adst = op->sym->auth.digest.data; 641 if (adst == NULL) { 642 adst = rte_pktmbuf_mtod_offset(m_adst, 643 uint8_t *, 644 op->sym->auth.data.offset + 645 op->sym->auth.data.length); 646 } 647 } else { 648 adst = (uint8_t *)rte_pktmbuf_append(m_asrc, 649 op->sym->auth.digest.length); 650 } 651 652 if (unlikely(op->sym->cipher.iv.length != sess->cipher.iv_len)) { 653 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; 654 return; 655 } 656 657 arg.cipher.iv = op->sym->cipher.iv.data; 658 arg.cipher.key = sess->cipher.key.data; 659 /* Acquire combined mode function */ 660 crypto_func = sess->crypto_func; 661 ARMV8_CRYPTO_ASSERT(crypto_func != NULL); 662 error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg); 663 if (error != 0) { 664 op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS; 665 return; 666 } 667 668 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS; 669 if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) { 670 if (memcmp(adst, op->sym->auth.digest.data, 671 op->sym->auth.digest.length) != 0) { 672 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED; 673 } 674 /* Trim area used for digest from mbuf. 
*/ 675 rte_pktmbuf_trim(m_asrc, 676 op->sym->auth.digest.length); 677 } 678 } 679 680 /** Process crypto operation for mbuf */ 681 static inline int 682 process_op(const struct armv8_crypto_qp *qp, struct rte_crypto_op *op, 683 struct armv8_crypto_session *sess) 684 { 685 struct rte_mbuf *msrc, *mdst; 686 687 msrc = op->sym->m_src; 688 mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src; 689 690 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED; 691 692 switch (sess->chain_order) { 693 case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH: 694 case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */ 695 process_armv8_chained_op(op, sess, msrc, mdst); 696 break; 697 default: 698 op->status = RTE_CRYPTO_OP_STATUS_ERROR; 699 break; 700 } 701 702 /* Free session if a session-less crypto op */ 703 if (op->sym->sess_type == RTE_CRYPTO_SYM_OP_SESSIONLESS) { 704 memset(sess, 0, sizeof(struct armv8_crypto_session)); 705 rte_mempool_put(qp->sess_mp, op->sym->session); 706 op->sym->session = NULL; 707 } 708 709 if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED) 710 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS; 711 712 if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR)) 713 return -1; 714 715 return 0; 716 } 717 718 /* 719 *------------------------------------------------------------------------------ 720 * PMD Framework 721 *------------------------------------------------------------------------------ 722 */ 723 724 /** Enqueue burst */ 725 static uint16_t 726 armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops, 727 uint16_t nb_ops) 728 { 729 struct armv8_crypto_session *sess; 730 struct armv8_crypto_qp *qp = queue_pair; 731 int i, retval; 732 733 for (i = 0; i < nb_ops; i++) { 734 sess = get_session(qp, ops[i]); 735 if (unlikely(sess == NULL)) 736 goto enqueue_err; 737 738 retval = process_op(qp, ops[i], sess); 739 if (unlikely(retval < 0)) 740 goto enqueue_err; 741 } 742 743 retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i, 744 NULL); 745 
qp->stats.enqueued_count += retval;

	return retval;

enqueue_err:
	/* Flush the ops processed before the failure, then record the error */
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	if (ops[i] != NULL)
		ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

	qp->stats.enqueue_err_count++;
	return retval;
}

/** Dequeue burst: drain completed ops from the qp's processed ring */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;

	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}

/**
 * Create ARMv8 crypto device.
 *
 * Verifies the CPU exposes the AES, SHA1/SHA2 and NEON feature flags
 * before registering the vdev; returns -EFAULT on any failure.
 */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_crypto_vdev_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU for support for AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for SHA instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for Advance SIMD instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	/* Fall back to the vdev name if the user supplied none */
	if (init_params->name[0] == '\0')
		snprintf(init_params->name, sizeof(init_params->name),
				"%s", name);

	dev = rte_cryptodev_vdev_pmd_init(init_params->name,
				sizeof(struct armv8_crypto_private),
				init_params->socket_id,
				vdev);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->dev_type = RTE_CRYPTODEV_ARMV8_PMD;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE;

	/* Set vector instructions mode supported */
	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
	internals->max_nb_sessions = init_params->max_nb_sessions;

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device: parse vdev args and create the device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_crypto_vdev_init_params init_params = {
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_QUEUE_PAIRS,
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_SESSIONS,
		rte_socket_id(),
		{0}
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_vdev_parse_init_params(&init_params, input_args);

	RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
			init_params.socket_id);
	if (init_params.name[0] != '\0') {
		RTE_LOG(INFO, PMD, "  User defined name = %s\n",
			init_params.name);
	}
	RTE_LOG(INFO, PMD, "  Max number of queue pairs = %d\n",
			init_params.max_nb_queue_pairs);
	RTE_LOG(INFO, PMD, "  Max number of sessions = %d\n",
			init_params.max_nb_sessions);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device (no per-device state to release) */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	const char *name;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	RTE_LOG(INFO, PMD,
		"Closing ARMv8 crypto device %s on numa socket %u\n",
		name, rte_socket_id());

	return 0;
}

static struct rte_vdev_driver armv8_crypto_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"max_nb_sessions=<int> "
	"socket_id=<int>");