/*
 *   BSD LICENSE
 *
 *   Copyright (C) Cavium networks Ltd. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium networks nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:	order of operations (cipher, auth) or (auth, cipher)
 * - direction:	encryption or decryption
 * - calg:	cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:	authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:	cipher key length, for example 128, 192 or 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is
 * indexed by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer instead of
 * traversing the array manually and comparing function parameters on each
 * loop.
 *
 *                   +--+CRYPTO_FUNC
 *            +--+ENC|
 *      +--+CA|
 *      |     +--+DEC
 * ORDER|
 *      |     +--+ENC
 *      +--+AC|
 *            +--+DEC
 */
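/*
 * For illustration only (not referenced by the driver): with the tables
 * defined below, resolving the handler for AES-128-CBC encryption chained
 * with SHA1-HMAC walks the hierarchy as
 *
 *   crypto_chain_order[ARMV8_CRYPTO_CHAIN_CIPHER_AUTH]
 *	-> crypto_cipher_auth
 *   crypto_cipher_auth[RTE_CRYPTO_CIPHER_OP_ENCRYPT]
 *	-> &crypto_op_ca_encrypt
 *   (*crypto_op_ca_encrypt)[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)]
 *	-> aes128cbc_sha1_hmac
 *
 * assuming the chain order and cipher operation enum values index the
 * top level arrays in the order they are listed there.
 */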
/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:		max cipher ID number
 * CRYPTO_AUTH_MAX:		max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:	max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)		(ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC		RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC		RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC	RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt:	cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:	cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:	authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:	authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:	cipher first, authenticate after
 * crypto_auth_cipher:	authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
({									\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);			\
})

/*----------------------------------------------------------------------------*/
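/*
 * Illustrative use of the macro above (hypothetical values, mirroring what
 * armv8_crypto_set_session_chained_parameters() does further down):
 *
 *	crypto_func_t f = CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *					  RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *					  CIPH_AES_CBC, AUTH_SHA1_HMAC, 128);
 *
 * Entries absent from the designated initializers above evaluate to NULL,
 * which the session setup code treats as "combination not supported".
 */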
/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:		max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:	max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:	keys for encryption
 * crypto_key_sched_decrypt:	keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
({									\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	((*ks_tbl)[(calg)][KEYL(keyl)]);				\
})

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{
	/*
	 * This driver currently covers only chained operations.
	 * Cipher-only or authentication-only operations, and chains
	 * longer than two xform structures, are rejected.
	 */
	if (xform->next == NULL || xform->next->next != NULL)
		return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
	}

	return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}
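/*
 * Illustration (application-side sketch, not driver code): a two-element
 * chain that the function above classifies as CIPHER_AUTH:
 *
 *	struct rte_crypto_sym_xform auth_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *		.next = NULL,
 *	};
 *	struct rte_crypto_sym_xform cipher_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *		.next = &auth_xf,
 *	};
 *
 * armv8_crypto_get_chain_order(&cipher_xf) returns
 * ARMV8_CRYPTO_CHAIN_CIPHER_AUTH; passing &auth_xf instead yields
 * ARMV8_CRYPTO_CHAIN_AUTH_CIPHER.
 */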
static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
				const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0,
					sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
						xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0,
					sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
						xform->auth.key.length);
	/*
	 * XOR the key with the IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * A byte-by-byte operation may seem less efficient here, but in
	 * fact it is the opposite. The resulting assembly code is likely
	 * to operate on NEON registers (load the auth key to Qx, load
	 * IPAD/OPAD into multiple elements of Qy, then eor 128 bits at
	 * once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}
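/*
 * Background sketch (standard RFC 2104 HMAC, not driver-specific code):
 *
 *	HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m))
 *
 * Both pads span exactly one hash block, so auth_set_prerequisites() below
 * can run a single hash block over each pad once per session and cache the
 * two intermediate digest states; per operation, only the message and the
 * outer finalization remain to be hashed.
 */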
static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the session key memory */
		memset(sess->auth.hmac.key, 0, SHA1_AUTH_KEY_LENGTH);

		if (xform->auth.key.length > SHA1_AUTH_KEY_LENGTH) {
			/*
			 * In case the key is longer than 160 bits
			 * the algorithm will use SHA1(key) instead.
			 */
			error = sha1_block(NULL, xform->auth.key.data,
				sess->auth.hmac.key, xform->auth.key.length);
			if (error != 0)
				return -1;
		} else {
			/*
			 * Copy the given authentication key into the session
			 * key. Since the session key was zeroed above, no
			 * additional zero padding is needed if the key is
			 * shorter than SHA1_AUTH_KEY_LENGTH.
			 */
			rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);
		}

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as an initialization state for the final HMAC.
		 */
		error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the session key memory */
		memset(sess->auth.hmac.key, 0, SHA256_AUTH_KEY_LENGTH);

		if (xform->auth.key.length > SHA256_AUTH_KEY_LENGTH) {
			/*
			 * In case the key is longer than 256 bits
			 * the algorithm will use SHA256(key) instead.
			 */
			error = sha256_block(NULL, xform->auth.key.data,
				sess->auth.hmac.key, xform->auth.key.length);
			if (error != 0)
				return -1;
		} else {
			/*
			 * Copy the given authentication key into the session
			 * key. Since the session key was zeroed above, no
			 * additional zero padding is needed if the key is
			 * shorter than SHA256_AUTH_KEY_LENGTH.
			 */
			rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);
		}

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as an initialization state for the final HMAC.
		 */
		error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

		break;
	default:
		break;
	}

	return 0;
}

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data,
					xform->cipher.key.data);
	}

	return 0;
}
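/*
 * Illustrative flow (hypothetical values): for a 128-bit AES-CBC encrypt
 * session the setup code below effectively performs
 *
 *	sess->cipher.key_sched = CRYPTO_GET_KEY_SCHED(
 *			RTE_CRYPTO_CIPHER_OP_ENCRYPT, CIPH_AES_CBC, 128);
 *	cipher_set_prerequisites(sess, cipher_xform);
 *
 * i.e. aes128_key_sched_enc expands the raw 16-byte user key into
 * sess->cipher.key.data once per session, so the per-packet combined
 * function consumes pre-expanded round keys.
 */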
static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate and prepare scratch order of combined operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -EINVAL;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	cop = sess->cipher.direction;
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv_len = 16;
		break;
	default:
		return -EINVAL;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -EINVAL;
	}

	/* Verify supported key lengths and extract proper algorithm */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -EINVAL;
	}

	if (unlikely(sess->crypto_func == NULL)) {
		/*
		 * If we got here, there must be a bug in the algorithm
		 * selection above. Nevertheless, keep this check to catch
		 * the bug immediately and avoid a NULL pointer dereference
		 * during op processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -EINVAL;
	}

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return -EINVAL;
		}
	} else {
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -EINVAL;
	}

	return 0;
}

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session *sess = NULL;

	if (op->sym->sess_type == RTE_CRYPTO_SYM_OP_WITH_SESSION) {
		/* get existing session */
		if (likely(op->sym->session != NULL &&
				op->sym->session->dev_type ==
				RTE_CRYPTODEV_ARMV8_PMD)) {
			sess = (struct armv8_crypto_session *)
				op->sym->session->_private;
		}
	} else {
		/* provide internal session */
		void *_sess = NULL;

		if (!rte_mempool_get(qp->sess_mp, (void **)&_sess)) {
			sess = (struct armv8_crypto_session *)
				((struct rte_cryptodev_sym_session *)_sess)
				->_private;

			if (unlikely(armv8_crypto_set_session_parameters(
					sess, op->sym->xform) != 0)) {
				rte_mempool_put(qp->sess_mp, _sess);
				sess = NULL;
			} else
				op->sym->session = _sess;
		}
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}
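/*
 * Illustration (application side, not driver code): the two branches above
 * correspond to the two ways an op can arrive, e.g.
 *
 *	op->sym->sess_type = RTE_CRYPTO_SYM_OP_WITH_SESSION;
 *	op->sym->session = configured_session;
 * or
 *	op->sym->sess_type = RTE_CRYPTO_SYM_OP_SESSIONLESS;
 *	op->sym->xform = &cipher_xf;	/+ chain as in the earlier sketch +/
 *
 * In the session-less case a temporary session is drawn from qp->sess_mp,
 * configured from the xform chain, and released again in process_op().
 */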
/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher/auth) crypto operation */
static inline void
process_armv8_chained_op
		(struct rte_crypto_op *op, struct armv8_crypto_session *sess,
		struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
	crypto_func_t crypto_func;
	crypto_arg_t arg;
	struct rte_mbuf *m_asrc, *m_adst;
	uint8_t *csrc, *cdst;
	uint8_t *adst, *asrc;
	uint64_t clen, alen;
	int error;

	clen = op->sym->cipher.data.length;
	alen = op->sym->auth.data.length;

	csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
			op->sym->cipher.data.offset);
	cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
			op->sym->cipher.data.offset);

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		m_asrc = m_adst = mbuf_dst;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		m_asrc = mbuf_src;
		m_adst = mbuf_dst;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}
	asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
			op->sym->auth.data.offset);

	switch (sess->auth.mode) {
	case ARMV8_CRYPTO_AUTH_AS_AUTH:
		/* Nothing to do here, just verify correct option */
		break;
	case ARMV8_CRYPTO_AUTH_AS_HMAC:
		arg.digest.hmac.key = sess->auth.hmac.key;
		arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
		arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
		adst = op->sym->auth.digest.data;
		if (adst == NULL) {
			adst = rte_pktmbuf_mtod_offset(m_adst,
					uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		}
	} else {
		adst = (uint8_t *)rte_pktmbuf_append(m_asrc,
				op->sym->auth.digest.length);
	}

	if (unlikely(op->sym->cipher.iv.length != sess->cipher.iv_len)) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	arg.cipher.iv = op->sym->cipher.iv.data;
	arg.cipher.key = sess->cipher.key.data;
	/* Acquire combined mode function */
	crypto_func = sess->crypto_func;
	ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
	error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
	if (error != 0) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(adst, op->sym->auth.digest.data,
				op->sym->auth.digest.length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
		/* Trim area used for digest from mbuf. */
		rte_pktmbuf_trim(m_asrc,
				op->sym->auth.digest.length);
	}
}
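/*
 * Worked example (illustrative) of the digest source/destination choice
 * made above: for CIPHER_AUTH (encrypt-then-MAC) the digest is computed
 * over the just-produced ciphertext, so both digest mbuf pointers refer to
 * mbuf_dst; for AUTH_CIPHER the digest relates to the data as it sits in
 * the source buffer, hence m_asrc = mbuf_src while the digest destination
 * may still be mbuf_dst.
 */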
/** Process crypto operation for mbuf */
static inline int
process_op(const struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess)
{
	struct rte_mbuf *msrc, *mdst;

	msrc = op->sym->m_src;
	mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
		process_armv8_chained_op(op, sess, msrc, mdst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		break;
	}

	/* Free session if a session-less crypto op */
	if (op->sym->sess_type == RTE_CRYPTO_SYM_OP_SESSIONLESS) {
		memset(sess, 0, sizeof(struct armv8_crypto_session));
		rte_mempool_put(qp->sess_mp, op->sym->session);
		op->sym->session = NULL;
	}

	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
		return -1;

	return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_session *sess;
	struct armv8_crypto_qp *qp = queue_pair;
	int i, retval;

	for (i = 0; i < nb_ops; i++) {
		sess = get_session(qp, ops[i]);
		if (unlikely(sess == NULL))
			goto enqueue_err;

		retval = process_op(qp, ops[i], sess);
		if (unlikely(retval < 0))
			goto enqueue_err;
	}

	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	qp->stats.enqueued_count += retval;

	return retval;

enqueue_err:
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	if (ops[i] != NULL)
		ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

	qp->stats.enqueue_err_count++;
	return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;
	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}
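/*
 * Usage sketch (application side, hypothetical dev_id/qp_id): because this
 * PMD processes each op synchronously inside the enqueue call and parks the
 * result on qp->processed_ops, a dequeue issued right after an enqueue
 * drains the completed operations:
 *
 *	uint16_t n = rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb_ops);
 *	n = rte_cryptodev_dequeue_burst(dev_id, qp_id, ops, n);
 */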
/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_crypto_vdev_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU for support for AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for SHA instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU for support for Advanced SIMD instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	if (init_params->name[0] == '\0')
		snprintf(init_params->name, sizeof(init_params->name),
				"%s", name);

	dev = rte_cryptodev_pmd_virtual_dev_init(init_params->name,
				sizeof(struct armv8_crypto_private),
				init_params->socket_id);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->dev_type = RTE_CRYPTODEV_ARMV8_PMD;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE;

	/* Set vector instructions mode supported */
	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
	internals->max_nb_sessions = init_params->max_nb_sessions;

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_crypto_vdev_init_params init_params = {
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_QUEUE_PAIRS,
		RTE_CRYPTODEV_VDEV_DEFAULT_MAX_NB_SESSIONS,
		rte_socket_id(),
		{0}
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_parse_vdev_init_params(&init_params, input_args);

	RTE_LOG(INFO, PMD, "Initialising %s on NUMA node %d\n", name,
			init_params.socket_id);
	if (init_params.name[0] != '\0') {
		RTE_LOG(INFO, PMD, "  User defined name = %s\n",
			init_params.name);
	}
	RTE_LOG(INFO, PMD, "  Max number of queue pairs = %d\n",
			init_params.max_nb_queue_pairs);
	RTE_LOG(INFO, PMD, "  Max number of sessions = %d\n",
			init_params.max_nb_sessions);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	const char *name;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	RTE_LOG(INFO, PMD,
		"Closing ARMv8 crypto device %s on numa socket %u\n",
		name, rte_socket_id());

	return 0;
}

static struct rte_vdev_driver armv8_crypto_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"max_nb_sessions=<int> "
	"socket_id=<int>");
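/*
 * Example (illustrative) of instantiating this PMD through EAL vdev
 * arguments, assuming CRYPTODEV_NAME_ARMV8_PMD expands to "crypto_armv8":
 *
 *	--vdev "crypto_armv8,max_nb_queue_pairs=2,max_nb_sessions=128,socket_id=0"
 */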