/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
 */

/* Keep the deprecated low-level SHA*_Init/SHA*_Transform APIs visible on
 * OpenSSL >= 3.0; this file uses them for single-block partial hashes.
 */
#define OPENSSL_API_COMPAT 0x10100000L

#include <dirent.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/queue.h>
#include <sys/types.h>
#include <unistd.h>
#include <openssl/sha.h>
#include <openssl/cmac.h> /*sub key apis*/
#include <openssl/evp.h> /*sub key apis*/

#include <rte_hexdump.h>
#include <rte_memzone.h>
#include <rte_malloc.h>
#include <rte_memory.h>
#include <rte_spinlock.h>
#include <rte_string_fns.h>
#include <cryptodev_pmd.h>

#include "ccp_dev.h"
#include "ccp_crypto.h"
#include "ccp_pci.h"
#include "ccp_pmd_private.h"

#include <openssl/conf.h>
#include <openssl/err.h>
#include <openssl/hmac.h>

/* IOMMU mode detected at probe time (defined in the PMD's device code). */
extern int iommu_mode;
/* Scratch SHA context buffer shared by session configuration
 * (allocated elsewhere in the driver; written via rte_memcpy below).
 */
void *sha_ctx;
/* SHA initial context values.
 * NOTE(review): the H words are listed high-to-low (H4..H0 / H7..H0) —
 * presumably the reversed word order the CCP engine expects for its
 * context storage block; confirm against the CCP hardware spec.
 */
uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA1_H4, SHA1_H3,
	SHA1_H2, SHA1_H1,
	SHA1_H0, 0x0U,
	0x0U, 0x0U,
};

uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA224_H7, SHA224_H6,
	SHA224_H5, SHA224_H4,
	SHA224_H3, SHA224_H2,
	SHA224_H1, SHA224_H0,
};

uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA256_H7, SHA256_H6,
	SHA256_H5, SHA256_H4,
	SHA256_H3, SHA256_H2,
	SHA256_H1, SHA256_H0,
};

uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA384_H7, SHA384_H6,
	SHA384_H5, SHA384_H4,
	SHA384_H3, SHA384_H2,
	SHA384_H1, SHA384_H0,
};

uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA512_H7, SHA512_H6,
	SHA512_H5, SHA512_H4,
	SHA512_H3, SHA512_H2,
	SHA512_H1, SHA512_H0,
};

#if defined(_MSC_VER)
#define SHA3_CONST(x) x
#else
#define SHA3_CONST(x) x##L
#endif

/** 'Words' here refers to uint64_t */
#define SHA3_KECCAK_SPONGE_WORDS \
	(((1600) / 8) / sizeof(uint64_t))
/* Software SHA-3 sponge state, used to precompute HMAC ipad/opad
 * partial hashes for the SHA-3 variants (the CCP consumes the raw
 * 200-byte Keccak state as its hash context).
 */
typedef struct sha3_context_ {
	uint64_t saved;
	/**
	 * The portion of the input message that we
	 * didn't consume yet
	 */
	union {
		uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
		/* Keccak's state */
		uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
		/**total 200 ctx size**/
	};
	unsigned int byteIndex;
	/**
	 * 0..7--the next byte after the set one
	 * (starts from 0; 0--none are buffered)
	 */
	unsigned int wordIndex;
	/**
	 * 0..24--the next word to integrate input
	 * (starts from 0)
	 */
	unsigned int capacityWords;
	/**
	 * the double size of the hash output in
	 * words (e.g. 16 for Keccak 512)
	 */
} sha3_context;

/* 64-bit rotate-left; y must be 1..63 (y == 0 would shift by 64 -> UB,
 * never happens with the rotation constants below).
 */
#ifndef SHA3_ROTL64
#define SHA3_ROTL64(x, y) \
	(((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
#endif

/* Keccak-f[1600] round constants (Iota step), FIPS 202. */
static const uint64_t keccakf_rndc[24] = {
	SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
	SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
	SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
	SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
	SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
	SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
	SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
	SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
	SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
	SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
};

/* Keccak rotation offsets (Rho step). */
static const unsigned int keccakf_rotc[24] = {
	1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
	18, 39, 61, 20, 44
};

/* Keccak lane permutation order (Pi step). */
static const unsigned int
keccakf_piln[24] = {
	10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
	14, 22, 9, 6, 1
};

/* Map an xform chain to the CCP command ordering:
 * auth-only, cipher-only, cipher-then-hash, hash-then-cipher,
 * or combined (AEAD). Unrecognized chains return CCP_CMD_NOT_SUPPORTED.
 */
static enum ccp_cmd_order
ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
{
	enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;

	if (xform == NULL)
		return res;
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next == NULL)
			return CCP_CMD_AUTH;
		else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return CCP_CMD_HASH_CIPHER;
	}
	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next == NULL)
			return CCP_CMD_CIPHER;
		else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return CCP_CMD_CIPHER_HASH;
	}
	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
		return CCP_CMD_COMBINED;
	return res;
}

/* partial hash using openssl */
/* Each partial_hash_* helper runs exactly one compression round over a
 * single input block (no padding, no finalization) and copies out the
 * raw chaining state. Copying the first DIGEST_LENGTH bytes of the
 * OpenSSL context assumes the state words sit at the start of the
 * SHA*_CTX struct layout — true for OpenSSL's public SHA_CTX layouts,
 * but worth confirming if the OpenSSL version changes.
 */
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA224_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	/* SHA-224 shares SHA-256 state: copy all 8 state words */
	rte_memcpy(data_out, &ctx,
		   SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
{
	SHA256_CTX ctx;

	if (!SHA256_Init(&ctx))
		return -EFAULT;
	SHA256_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx,
		   SHA256_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA384_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	/* SHA-384 shares SHA-512 state: copy all 8 state words */
	rte_memcpy(data_out, &ctx,
		   SHA512_DIGEST_LENGTH);
	return 0;
}

static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
{
	SHA512_CTX ctx;

	if (!SHA512_Init(&ctx))
		return -EFAULT;
	SHA512_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx,
		   SHA512_DIGEST_LENGTH);
	return 0;
}

/* Keccak-f[1600] permutation over the 25-lane state, per FIPS 202:
 * Theta, Rho+Pi, Chi, Iota for each of the 24 rounds.
 */
static void
keccakf(uint64_t s[25])
{
	int i, j, round;
	uint64_t t, bc[5];
#define KECCAK_ROUNDS 24

	for (round = 0; round < KECCAK_ROUNDS; round++) {

		/* Theta */
		for (i = 0; i < 5; i++)
			bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
				s[i + 20];

		for (i = 0; i < 5; i++) {
			t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
			for (j = 0; j < 25; j += 5)
				s[j + i] ^= t;
		}

		/* Rho Pi */
		t = s[1];
		for (i = 0; i < 24; i++) {
			j = keccakf_piln[i];
			bc[0] = s[j];
			s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
			t = bc[0];
		}

		/* Chi */
		for (j = 0; j < 25; j += 5) {
			for (i = 0; i < 5; i++)
				bc[i] = s[j + i];
			for (i = 0; i < 5; i++)
				s[j + i] ^= (~bc[(i + 1) % 5]) &
					    bc[(i + 2) % 5];
		}

		/* Iota */
		s[0] ^= keccakf_rndc[round];
	}
}

/* sha3_InitNNN: zero the sponge and set capacity = 2*NNN bits,
 * expressed in 64-bit words (rate = 25 - capacityWords lanes).
 */
static void
sha3_Init224(void *priv)
{
	sha3_context *ctx = (sha3_context *) priv;

	memset(ctx, 0, sizeof(*ctx));
	ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
}

static void
sha3_Init256(void *priv)
{
	sha3_context *ctx = (sha3_context *) priv;

	memset(ctx, 0, sizeof(*ctx));
	ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
}

static void
sha3_Init384(void *priv)
{
	sha3_context *ctx = (sha3_context *) priv;

	memset(ctx, 0, sizeof(*ctx));
	ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
}

static void
sha3_Init512(void *priv)
{
	sha3_context *ctx = (sha3_context *) priv;

	memset(ctx, 0, sizeof(*ctx));
	ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
}


/* This is simply the 'update' with the padding block.
 * The padding block is 0x01 || 0x00* || 0x80. First 0x01 and last 0x80
 * bytes are always present, but they can be the same byte.
 */
/* Absorb 'len' bytes into the sponge. Bytes are buffered in ctx->saved
 * (little-endian) until a full 64-bit lane is available, then XORed into
 * the state; a full rate's worth of lanes triggers keccakf().
 */
static void
sha3_Update(void *priv, void const *bufIn, size_t len)
{
	sha3_context *ctx = (sha3_context *) priv;
	/* bytes still needed to complete the partially-buffered lane */
	unsigned int old_tail = (8 - ctx->byteIndex) & 7;
	size_t words;
	unsigned int tail;
	size_t i;
	const uint8_t *buf = bufIn;

	/* not enough input to finish the buffered lane: stash and return */
	if (len < old_tail) {
		while (len--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				      ((ctx->byteIndex++) * 8);
		return;
	}

	/* complete the buffered lane and absorb it */
	if (old_tail) {
		len -= old_tail;
		while (old_tail--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				      ((ctx->byteIndex++) * 8);

		ctx->s[ctx->wordIndex] ^= ctx->saved;
		ctx->byteIndex = 0;
		ctx->saved = 0;
		if (++ctx->wordIndex ==
		   (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* absorb whole 64-bit lanes directly from the input */
	words = len / sizeof(uint64_t);
	tail = len - words * sizeof(uint64_t);

	for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
		/* assemble a little-endian lane byte-by-byte
		 * (alignment/endianness safe)
		 */
		const uint64_t t = (uint64_t) (buf[0]) |
				((uint64_t) (buf[1]) << 8 * 1) |
				((uint64_t) (buf[2]) << 8 * 2) |
				((uint64_t) (buf[3]) << 8 * 3) |
				((uint64_t) (buf[4]) << 8 * 4) |
				((uint64_t) (buf[5]) << 8 * 5) |
				((uint64_t) (buf[6]) << 8 * 6) |
				((uint64_t) (buf[7]) << 8 * 7);
		ctx->s[ctx->wordIndex] ^= t;
		if (++ctx->wordIndex ==
		   (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* buffer any remaining (< 8) bytes for the next call */
	while (tail--)
		ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
}

/* partial_hash_sha3_NNN: absorb one full rate-sized block (the HMAC
 * ipad/opad block), then copy the whole 200-byte Keccak state out in
 * reversed byte order — presumably the layout the CCP engine expects
 * for a SHA-3 context; confirm against the hardware spec.
 */
int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *ctx;
	int i;

	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
	if (!ctx) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init224(ctx);
	sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
	rte_free(ctx);

	return 0;
}

int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *ctx;
	int i;

	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
	if (!ctx) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init256(ctx);
	sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
	rte_free(ctx);

	return 0;
}

int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *ctx;
	int i;

	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
	if (!ctx) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init384(ctx);
	sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
	rte_free(ctx);

	return 0;
}

int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
{
	sha3_context *ctx;
	int i;

	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
	if (!ctx) {
		CCP_LOG_ERR("sha3-ctx creation failed");
		return -ENOMEM;
	}
	sha3_Init512(ctx);
	sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
	rte_free(ctx);

	return 0;
}

/* Precompute the HMAC inner (ipad) and outer (opad) partial hashes for
 * the session and store them in sess->auth.pre_compute: ipad state at
 * offset 0, opad state at offset sess->auth.ctx_len. SHA-1/2 states are
 * stored word-reversed for the CCP engine; SHA-3 states are stored as
 * the raw (byte-reversed) Keccak context. Returns 0 on success, -1 on
 * failure. Assumes auth.key is zero-padded to block_size by the caller.
 */
static int generate_partial_hash(struct ccp_session *sess)
{

	uint8_t ipad[sess->auth.block_size];
	uint8_t opad[sess->auth.block_size];
	uint8_t *ipad_t, *opad_t;
	uint32_t *hash_value_be32, hash_temp32[8];
	uint64_t *hash_value_be64, hash_temp64[8];
	int i, count;
	uint8_t *hash_value_sha3;

	opad_t = ipad_t = (uint8_t *)sess->auth.key;

	/* both aliases point at the start of the precompute area */
	hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
	hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);

	/* considering key size is always equal to block size of algorithm */
	for (i = 0; i < sess->auth.block_size; i++) {
		ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
		opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
	}

	/* each case: hash ipad block -> store word-reversed at offset 0,
	 * hash opad block -> store word-reversed at offset ctx_len
	 */
	switch (sess->auth.algo) {
	case CCP_AUTH_ALGO_SHA1_HMAC:
		count = SHA1_DIGEST_SIZE >> 2; /* state words (32-bit) */

		if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA224_HMAC:
		count = SHA256_DIGEST_SIZE >> 2; /* full 8-word state */

		if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_224_HMAC:
		/* SHA-3 helpers already store in CCP layout */
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_224(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_224(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA256_HMAC:
		count = SHA256_DIGEST_SIZE >> 2;

		if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_256_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_256(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_256(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA384_HMAC:
		count = SHA512_DIGEST_SIZE >> 3; /* state words (64-bit) */

		if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_384_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_384(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_384(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA512_HMAC:
		count = SHA512_DIGEST_SIZE >> 3;

		if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i <
count; i++, hash_value_be64++) 569 *hash_value_be64 = hash_temp64[count - 1 - i]; 570 return 0; 571 case CCP_AUTH_ALGO_SHA3_512_HMAC: 572 hash_value_sha3 = sess->auth.pre_compute; 573 if (partial_hash_sha3_512(ipad, hash_value_sha3)) 574 return -1; 575 576 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute 577 + sess->auth.ctx_len); 578 if (partial_hash_sha3_512(opad, hash_value_sha3)) 579 return -1; 580 return 0; 581 default: 582 CCP_LOG_ERR("Invalid auth algo"); 583 return -1; 584 } 585 } 586 587 /* prepare temporary keys K1 and K2 */ 588 static void prepare_key(unsigned char *k, unsigned char *l, int bl) 589 { 590 int i; 591 /* Shift block to left, including carry */ 592 for (i = 0; i < bl; i++) { 593 k[i] = l[i] << 1; 594 if (i < bl - 1 && l[i + 1] & 0x80) 595 k[i] |= 1; 596 } 597 /* If MSB set fixup with R */ 598 if (l[0] & 0x80) 599 k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b; 600 } 601 602 /* subkeys K1 and K2 generation for CMAC */ 603 static int 604 generate_cmac_subkeys(struct ccp_session *sess) 605 { 606 const EVP_CIPHER *algo; 607 EVP_CIPHER_CTX *ctx; 608 unsigned char *ccp_ctx; 609 size_t i; 610 int dstlen, totlen; 611 unsigned char zero_iv[AES_BLOCK_SIZE] = {0}; 612 unsigned char dst[2 * AES_BLOCK_SIZE] = {0}; 613 unsigned char k1[AES_BLOCK_SIZE] = {0}; 614 unsigned char k2[AES_BLOCK_SIZE] = {0}; 615 616 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128) 617 algo = EVP_aes_128_cbc(); 618 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192) 619 algo = EVP_aes_192_cbc(); 620 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256) 621 algo = EVP_aes_256_cbc(); 622 else { 623 CCP_LOG_ERR("Invalid CMAC type length"); 624 return -1; 625 } 626 627 ctx = EVP_CIPHER_CTX_new(); 628 if (!ctx) { 629 CCP_LOG_ERR("ctx creation failed"); 630 return -1; 631 } 632 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key, 633 (unsigned char *)zero_iv) <= 0) 634 goto key_generate_err; 635 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0) 636 goto key_generate_err; 637 if 
(EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv, 638 AES_BLOCK_SIZE) <= 0) 639 goto key_generate_err; 640 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0) 641 goto key_generate_err; 642 643 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2); 644 645 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1); 646 prepare_key(k1, dst, AES_BLOCK_SIZE); 647 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--) 648 *ccp_ctx = k1[i]; 649 650 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + 651 (2 * CCP_SB_BYTES) - 1); 652 prepare_key(k2, k1, AES_BLOCK_SIZE); 653 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--) 654 *ccp_ctx = k2[i]; 655 656 EVP_CIPHER_CTX_free(ctx); 657 658 return 0; 659 660 key_generate_err: 661 CCP_LOG_ERR("CMAC Init failed"); 662 return -1; 663 } 664 665 /* configure session */ 666 static int 667 ccp_configure_session_cipher(struct ccp_session *sess, 668 const struct rte_crypto_sym_xform *xform) 669 { 670 const struct rte_crypto_cipher_xform *cipher_xform = NULL; 671 size_t i, j, x; 672 673 cipher_xform = &xform->cipher; 674 675 /* set cipher direction */ 676 if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT) 677 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT; 678 else 679 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT; 680 681 /* set cipher key */ 682 sess->cipher.key_length = cipher_xform->key.length; 683 rte_memcpy(sess->cipher.key, cipher_xform->key.data, 684 cipher_xform->key.length); 685 686 /* set iv parameters */ 687 sess->iv.offset = cipher_xform->iv.offset; 688 sess->iv.length = cipher_xform->iv.length; 689 690 switch (cipher_xform->algo) { 691 case RTE_CRYPTO_CIPHER_AES_CTR: 692 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR; 693 sess->cipher.um.aes_mode = CCP_AES_MODE_CTR; 694 sess->cipher.engine = CCP_ENGINE_AES; 695 break; 696 case RTE_CRYPTO_CIPHER_AES_ECB: 697 sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC; 698 sess->cipher.um.aes_mode = CCP_AES_MODE_ECB; 699 sess->cipher.engine = CCP_ENGINE_AES; 700 break; 701 case 
 RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
		sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
		sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_3DES;
		break;
	default:
		CCP_LOG_ERR("Unsupported cipher algo");
		return -1;
	}


	/* per-engine key validation; the CCP consumes keys byte-reversed */
	switch (sess->cipher.engine) {
	case CCP_ENGINE_AES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
		else if (sess->cipher.key_length == 32)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* reverse the whole key */
		for (i = 0; i < sess->cipher.key_length ; i++)
			sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
				sess->cipher.key[i];
		break;
	case CCP_ENGINE_3DES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.des_type = CCP_DES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.des_type = CCP_DES_TYPE_192;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* reverse each 8-byte DES subkey independently */
		for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
			for (i = 0; i < 8; i++)
				sess->cipher.key_ccp[(8 + x) - i - 1] =
					sess->cipher.key[i + x];
		break;
	default:
		CCP_LOG_ERR("Invalid CCP Engine");
		return -ENOTSUP;
	}
	/* resolve DMA addresses for nonce and reversed key */
	if (iommu_mode == 2) {
		sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
		sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
	} else {
		sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce);
		sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp);
	}
	return 0;
}

/* Parse an auth xform into the session. For plain hashes, points
 * auth.ctx at the algorithm's initial state; for HMAC, copies the
 * (block-size-bounded) key and precomputes ipad/opad partial hashes.
 * sess->auth_opt selects the CPU (openssl) path over the CCP engine.
 * Returns 0 on success, -1/-ENOTSUP on invalid parameters.
 */
static int
ccp_configure_session_auth(struct ccp_session *sess,
			   const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_auth_xform *auth_xform = NULL;
	size_t i;

	auth_xform = &xform->auth;

	sess->auth.digest_length = auth_xform->digest_length;
	if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
		sess->auth.op = CCP_AUTH_OP_GENERATE;
	else
		sess->auth.op = CCP_AUTH_OP_VERIFY;
	switch (auth_xform->algo) {
	case RTE_CRYPTO_AUTH_MD5_HMAC:
		if (sess->auth_opt) {
			sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
			sess->auth.offset = ((CCP_SB_BYTES << 1) -
					     MD5_DIGEST_SIZE);
			sess->auth.key_length = auth_xform->key.length;
			sess->auth.block_size = MD5_BLOCK_SIZE;
			memset(sess->auth.key, 0, sess->auth.block_size);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
		} else
			return -1; /* HMAC MD5 not supported on CCP */
		break;
	case RTE_CRYPTO_AUTH_SHA1:
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.algo = CCP_AUTH_ALGO_SHA1;
		sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
		sess->auth.ctx = (void *)ccp_sha1_init;
		sess->auth.ctx_len = CCP_SB_BYTES;
		sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
		rte_memcpy(sha_ctx, sess->auth.ctx, SHA_COMMON_DIGEST_SIZE);
		break;
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		if (sess->auth_opt) {
			if (auth_xform->key.length > SHA1_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
			sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
			sess->auth.block_size = SHA1_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
		} else {
			if (auth_xform->key.length > SHA1_BLOCK_SIZE)
				return -1;
			sess->auth.engine = CCP_ENGINE_SHA;
			sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
			sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
			sess->auth.ctx_len = CCP_SB_BYTES;
			sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
			sess->auth.block_size = SHA1_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			memset(sess->auth.pre_compute, 0,
			       sess->auth.ctx_len << 1);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
			if (generate_partial_hash(sess))
				return -1;
		}
		break;
	case RTE_CRYPTO_AUTH_SHA224:
		sess->auth.algo = CCP_AUTH_ALGO_SHA224;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
		sess->auth.ctx = (void *)ccp_sha224_init;
		sess->auth.ctx_len = CCP_SB_BYTES;
		sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
		rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
		if (sess->auth_opt) {
			if (auth_xform->key.length > SHA224_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
			sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
			sess->auth.block_size = SHA224_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
		} else {
			if (auth_xform->key.length > SHA224_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
			sess->auth.engine = CCP_ENGINE_SHA;
			sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
			sess->auth.ctx_len = CCP_SB_BYTES;
			sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
			sess->auth.block_size = SHA224_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			memset(sess->auth.pre_compute, 0,
			       sess->auth.ctx_len << 1);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
			if (generate_partial_hash(sess))
				return -1;
		}
		break;
	case RTE_CRYPTO_AUTH_SHA3_224:
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
		break;
	case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
		if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
			return -1;
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
		sess->auth.block_size = SHA3_224_BLOCK_SIZE;
		sess->auth.key_length = auth_xform->key.length;
		memset(sess->auth.key, 0, sess->auth.block_size);
		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
		rte_memcpy(sess->auth.key, auth_xform->key.data,
			   auth_xform->key.length);
		if (generate_partial_hash(sess))
			return -1;
		break;
	case RTE_CRYPTO_AUTH_SHA256:
		sess->auth.algo = CCP_AUTH_ALGO_SHA256;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
		sess->auth.ctx = (void *)ccp_sha256_init;
		sess->auth.ctx_len = CCP_SB_BYTES;
		sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
		rte_memcpy(sha_ctx, sess->auth.ctx, SHA256_DIGEST_SIZE);
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		if (sess->auth_opt) {
			if (auth_xform->key.length > SHA256_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
			sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
			sess->auth.block_size = SHA256_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
		} else {
			if (auth_xform->key.length > SHA256_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
			sess->auth.engine = CCP_ENGINE_SHA;
			sess->auth.ut.sha_type
 = CCP_SHA_TYPE_256;
			sess->auth.ctx_len = CCP_SB_BYTES;
			sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
			sess->auth.block_size = SHA256_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			memset(sess->auth.pre_compute, 0,
			       sess->auth.ctx_len << 1);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
			if (generate_partial_hash(sess))
				return -1;
		}
		break;
	case RTE_CRYPTO_AUTH_SHA3_256:
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
		break;
	case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
		if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
			return -1;
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
		sess->auth.block_size = SHA3_256_BLOCK_SIZE;
		sess->auth.key_length = auth_xform->key.length;
		memset(sess->auth.key, 0, sess->auth.block_size);
		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
		rte_memcpy(sess->auth.key, auth_xform->key.data,
			   auth_xform->key.length);
		if (generate_partial_hash(sess))
			return -1;
		break;
	case RTE_CRYPTO_AUTH_SHA384:
		sess->auth.algo = CCP_AUTH_ALGO_SHA384;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
		sess->auth.ctx = (void *)ccp_sha384_init;
		/* 512-bit family uses two storage blocks for the state */
		sess->auth.ctx_len = CCP_SB_BYTES << 1;
		sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
		rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
		if (sess->auth_opt) {
			if (auth_xform->key.length > SHA384_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
			sess->auth.offset = ((CCP_SB_BYTES << 1) -
					     SHA384_DIGEST_SIZE);
			sess->auth.block_size = SHA384_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
		} else {
			if (auth_xform->key.length > SHA384_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
			sess->auth.engine = CCP_ENGINE_SHA;
			sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
			sess->auth.ctx_len = CCP_SB_BYTES << 1;
			sess->auth.offset = ((CCP_SB_BYTES << 1) -
					     SHA384_DIGEST_SIZE);
			sess->auth.block_size = SHA384_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			memset(sess->auth.pre_compute, 0,
			       sess->auth.ctx_len << 1);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
			if (generate_partial_hash(sess))
				return -1;
		}
		break;
	case RTE_CRYPTO_AUTH_SHA3_384:
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
		break;
	case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
		if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
			return -1;
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
		sess->auth.block_size = SHA3_384_BLOCK_SIZE;
		sess->auth.key_length = auth_xform->key.length;
		memset(sess->auth.key, 0, sess->auth.block_size);
		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
		rte_memcpy(sess->auth.key, auth_xform->key.data,
			   auth_xform->key.length);
		if (generate_partial_hash(sess))
			return -1;
		break;
	case RTE_CRYPTO_AUTH_SHA512:
		sess->auth.algo = CCP_AUTH_ALGO_SHA512;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
		sess->auth.ctx = (void *)ccp_sha512_init;
		sess->auth.ctx_len = CCP_SB_BYTES << 1;
		sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
		rte_memcpy(sha_ctx, sess->auth.ctx, SHA512_DIGEST_SIZE);
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
		if (sess->auth_opt) {
			if (auth_xform->key.length > SHA512_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
			sess->auth.offset = ((CCP_SB_BYTES << 1) -
					     SHA512_DIGEST_SIZE);
			sess->auth.block_size = SHA512_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
		} else {
			if (auth_xform->key.length > SHA512_BLOCK_SIZE)
				return -1;
			sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
			sess->auth.engine = CCP_ENGINE_SHA;
			sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
			sess->auth.ctx_len = CCP_SB_BYTES << 1;
			sess->auth.offset = ((CCP_SB_BYTES << 1) -
					     SHA512_DIGEST_SIZE);
			sess->auth.block_size = SHA512_BLOCK_SIZE;
			sess->auth.key_length = auth_xform->key.length;
			memset(sess->auth.key, 0, sess->auth.block_size);
			memset(sess->auth.pre_compute, 0,
			       sess->auth.ctx_len << 1);
			rte_memcpy(sess->auth.key, auth_xform->key.data,
				   auth_xform->key.length);
			if (generate_partial_hash(sess))
				return -1;
		}
		break;
	case RTE_CRYPTO_AUTH_SHA3_512:
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
		break;
	case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
		if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
			return -1;
		sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
		sess->auth.engine = CCP_ENGINE_SHA;
		sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
		sess->auth.block_size = SHA3_512_BLOCK_SIZE;
		sess->auth.key_length = auth_xform->key.length;
		memset(sess->auth.key, 0, sess->auth.block_size);
		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
		rte_memcpy(sess->auth.key, auth_xform->key.data,
			   auth_xform->key.length);
		if (generate_partial_hash(sess))
			return -1;
		break;
	case RTE_CRYPTO_AUTH_AES_CMAC:
		sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
		sess->auth.engine = CCP_ENGINE_AES;
		sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
		sess->auth.key_length = auth_xform->key.length;
		/* padding and hash result */
		sess->auth.ctx_len = CCP_SB_BYTES << 1;
		sess->auth.offset = AES_BLOCK_SIZE;
		sess->auth.block_size = AES_BLOCK_SIZE;
		if (sess->auth.key_length == 16)
			sess->auth.ut.aes_type = CCP_AES_TYPE_128;
		else if (sess->auth.key_length == 24)
			sess->auth.ut.aes_type = CCP_AES_TYPE_192;
		else if (sess->auth.key_length == 32)
			sess->auth.ut.aes_type = CCP_AES_TYPE_256;
		else {
			CCP_LOG_ERR("Invalid CMAC key length");
			return -1;
		}
		rte_memcpy(sess->auth.key, auth_xform->key.data,
			   sess->auth.key_length);
		/* byte-reversed key copy for the CCP engine */
		for (i = 0; i < sess->auth.key_length; i++)
			sess->auth.key_ccp[sess->auth.key_length - i - 1] =
				sess->auth.key[i];
		if (generate_cmac_subkeys(sess))
			return -1;
		break;
	default:
		CCP_LOG_ERR("Unsupported hash algo");
		return -ENOTSUP;
	}
	return 0;
}

/* Parse an AEAD xform (AES-GCM) into the session. */
static int
ccp_configure_session_aead(struct
ccp_session *sess, 1120 const struct rte_crypto_sym_xform *xform) 1121 { 1122 const struct rte_crypto_aead_xform *aead_xform = NULL; 1123 size_t i; 1124 1125 aead_xform = &xform->aead; 1126 1127 sess->cipher.key_length = aead_xform->key.length; 1128 rte_memcpy(sess->cipher.key, aead_xform->key.data, 1129 aead_xform->key.length); 1130 1131 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) { 1132 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT; 1133 sess->auth.op = CCP_AUTH_OP_GENERATE; 1134 } else { 1135 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT; 1136 sess->auth.op = CCP_AUTH_OP_VERIFY; 1137 } 1138 sess->aead_algo = aead_xform->algo; 1139 sess->auth.aad_length = aead_xform->aad_length; 1140 sess->auth.digest_length = aead_xform->digest_length; 1141 1142 /* set iv parameters */ 1143 sess->iv.offset = aead_xform->iv.offset; 1144 sess->iv.length = aead_xform->iv.length; 1145 1146 switch (aead_xform->algo) { 1147 case RTE_CRYPTO_AEAD_AES_GCM: 1148 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM; 1149 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR; 1150 sess->cipher.engine = CCP_ENGINE_AES; 1151 if (sess->cipher.key_length == 16) 1152 sess->cipher.ut.aes_type = CCP_AES_TYPE_128; 1153 else if (sess->cipher.key_length == 24) 1154 sess->cipher.ut.aes_type = CCP_AES_TYPE_192; 1155 else if (sess->cipher.key_length == 32) 1156 sess->cipher.ut.aes_type = CCP_AES_TYPE_256; 1157 else { 1158 CCP_LOG_ERR("Invalid aead key length"); 1159 return -1; 1160 } 1161 for (i = 0; i < sess->cipher.key_length; i++) 1162 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] = 1163 sess->cipher.key[i]; 1164 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM; 1165 sess->auth.engine = CCP_ENGINE_AES; 1166 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH; 1167 sess->auth.ctx_len = CCP_SB_BYTES; 1168 sess->auth.offset = 0; 1169 sess->auth.block_size = AES_BLOCK_SIZE; 1170 sess->cmd_id = CCP_CMD_COMBINED; 1171 break; 1172 default: 1173 CCP_LOG_ERR("Unsupported aead algo"); 1174 return -ENOTSUP; 1175 } 1176 if (iommu_mode == 2) 
{ 1177 sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce); 1178 sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp); 1179 } else { 1180 sess->cipher.nonce_phys = rte_mem_virt2phy(sess->cipher.nonce); 1181 sess->cipher.key_phys = rte_mem_virt2phy(sess->cipher.key_ccp); 1182 } 1183 return 0; 1184 } 1185 1186 int 1187 ccp_set_session_parameters(struct ccp_session *sess, 1188 const struct rte_crypto_sym_xform *xform, 1189 struct ccp_private *internals) 1190 { 1191 const struct rte_crypto_sym_xform *cipher_xform = NULL; 1192 const struct rte_crypto_sym_xform *auth_xform = NULL; 1193 const struct rte_crypto_sym_xform *aead_xform = NULL; 1194 int ret = 0; 1195 1196 sess->auth_opt = internals->auth_opt; 1197 sess->cmd_id = ccp_get_cmd_id(xform); 1198 1199 switch (sess->cmd_id) { 1200 case CCP_CMD_CIPHER: 1201 cipher_xform = xform; 1202 break; 1203 case CCP_CMD_AUTH: 1204 auth_xform = xform; 1205 break; 1206 case CCP_CMD_CIPHER_HASH: 1207 cipher_xform = xform; 1208 auth_xform = xform->next; 1209 break; 1210 case CCP_CMD_HASH_CIPHER: 1211 auth_xform = xform; 1212 cipher_xform = xform->next; 1213 break; 1214 case CCP_CMD_COMBINED: 1215 aead_xform = xform; 1216 break; 1217 default: 1218 CCP_LOG_ERR("Unsupported cmd_id"); 1219 return -1; 1220 } 1221 1222 /* Default IV length = 0 */ 1223 sess->iv.length = 0; 1224 if (cipher_xform) { 1225 ret = ccp_configure_session_cipher(sess, cipher_xform); 1226 if (ret != 0) { 1227 CCP_LOG_ERR("Invalid/unsupported cipher parameters"); 1228 return ret; 1229 } 1230 } 1231 if (auth_xform) { 1232 ret = ccp_configure_session_auth(sess, auth_xform); 1233 if (ret != 0) { 1234 CCP_LOG_ERR("Invalid/unsupported auth parameters"); 1235 return ret; 1236 } 1237 } 1238 if (aead_xform) { 1239 ret = ccp_configure_session_aead(sess, aead_xform); 1240 if (ret != 0) { 1241 CCP_LOG_ERR("Invalid/unsupported aead parameters"); 1242 return ret; 1243 } 1244 } 1245 return ret; 1246 } 1247 1248 /* calculate CCP descriptors requirement */ 1249 
/** Number of CCP descriptors needed for one cipher op of this session. */
static inline int
ccp_cipher_slot(struct ccp_session *session)
{
	int count = 0;

	switch (session->cipher.algo) {
	case CCP_CIPHER_ALGO_AES_CBC:
		count = 2;
		/**< op + passthrough for iv */
		break;
	case CCP_CIPHER_ALGO_AES_ECB:
		count = 1;
		/**<only op*/
		break;
	case CCP_CIPHER_ALGO_AES_CTR:
		count = 2;
		/**< op + passthrough for iv */
		break;
	case CCP_CIPHER_ALGO_3DES_CBC:
		count = 2;
		/**< op + passthrough for iv */
		break;
	default:
		CCP_LOG_ERR("Unsupported cipher algo %d",
			    session->cipher.algo);
	}
	return count;
}

/**
 * Number of CCP descriptors needed for one auth op of this session.
 * Returns 0 for algorithms handled on the CPU (auth_opt set, MD5-HMAC).
 */
static inline int
ccp_auth_slot(struct ccp_session *session)
{
	int count = 0;

	switch (session->auth.algo) {
	case CCP_AUTH_ALGO_SHA1:
	case CCP_AUTH_ALGO_SHA224:
	case CCP_AUTH_ALGO_SHA256:
	case CCP_AUTH_ALGO_SHA384:
	case CCP_AUTH_ALGO_SHA512:
		count = 3;
		/**< op + lsb passthrough cpy to/from*/
		break;
	case CCP_AUTH_ALGO_MD5_HMAC:
		break;
	case CCP_AUTH_ALGO_SHA1_HMAC:
	case CCP_AUTH_ALGO_SHA224_HMAC:
	case CCP_AUTH_ALGO_SHA256_HMAC:
		if (session->auth_opt == 0)
			count = 6;
		break;
	case CCP_AUTH_ALGO_SHA384_HMAC:
	case CCP_AUTH_ALGO_SHA512_HMAC:
		/**
		 * 1. Load PHash1 = H(k ^ ipad); to LSB
		 * 2. generate IHash = H(hash on message with PHash1
		 * as init values);
		 * 3. Retrieve IHash 2 slots for 384/512
		 * 4. Load Phash2 = H(k ^ opad); to LSB
		 * 5. generate FHash = H(hash on Ihash with Phash2
		 * as init value);
		 * 6. Retrieve HMAC output from LSB to host memory
		 */
		if (session->auth_opt == 0)
			count = 7;
		break;
	case CCP_AUTH_ALGO_SHA3_224:
	case CCP_AUTH_ALGO_SHA3_256:
	case CCP_AUTH_ALGO_SHA3_384:
	case CCP_AUTH_ALGO_SHA3_512:
		count = 1;
		/**< only op ctx and dst in host memory*/
		break;
	case CCP_AUTH_ALGO_SHA3_224_HMAC:
	case CCP_AUTH_ALGO_SHA3_256_HMAC:
		count = 3;
		break;
	case CCP_AUTH_ALGO_SHA3_384_HMAC:
	case CCP_AUTH_ALGO_SHA3_512_HMAC:
		count = 4;
		/**
		 * 1. Op to Perform Ihash
		 * 2. Retrieve result from LSB to host memory
		 * 3. Perform final hash
		 */
		break;
	case CCP_AUTH_ALGO_AES_CMAC:
		count = 4;
		/**
		 * op
		 * extra descriptor in padding case
		 * (k1/k2(255:128) with iv(127:0))
		 * Retrieve result
		 */
		break;
	default:
		CCP_LOG_ERR("Unsupported auth algo %d",
			    session->auth.algo);
	}

	return count;
}

/** Number of CCP descriptors needed for one AEAD (AES-GCM) op. */
static int
ccp_aead_slot(struct ccp_session *session)
{
	int count = 0;

	switch (session->aead_algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		break;
	default:
		CCP_LOG_ERR("Unsupported aead algo %d",
			    session->aead_algo);
	}
	switch (session->auth.algo) {
	case CCP_AUTH_ALGO_AES_GCM:
		count = 5;
		/**
		 * 1. Passthru iv
		 * 2. Hash AAD
		 * 3. GCTR
		 * 4. Reload passthru
		 * 5. Hash Final tag
		 */
		break;
	default:
		CCP_LOG_ERR("Unsupported combined auth ALGO %d",
			    session->auth.algo);
	}
	return count;
}

/** Total CCP descriptor count for the session's command chain. */
int
ccp_compute_slot_count(struct ccp_session *session)
{
	int count = 0;

	switch (session->cmd_id) {
	case CCP_CMD_CIPHER:
		count = ccp_cipher_slot(session);
		break;
	case CCP_CMD_AUTH:
		count = ccp_auth_slot(session);
		break;
	case CCP_CMD_CIPHER_HASH:
	case CCP_CMD_HASH_CIPHER:
		count = ccp_cipher_slot(session);
		count += ccp_auth_slot(session);
		break;
	case CCP_CMD_COMBINED:
		count = ccp_aead_slot(session);
		break;
	default:
		CCP_LOG_ERR("Unsupported cmd_id");

	}

	return count;
}

/**
 * Map a CCP HMAC algo id to the matching OpenSSL EVP_MD.
 *
 * NOTE(review): the return type is uint8_t, so the -EINVAL in the
 * default branch is truncated to a non-zero positive value; callers in
 * this file ignore the return value, but a signed int return would be
 * cleaner — verify no caller stores it signed before changing.
 */
static uint8_t
algo_select(int sessalgo,
	    const EVP_MD **algo)
{
	int res = 0;

	switch (sessalgo) {
	case CCP_AUTH_ALGO_MD5_HMAC:
		*algo = EVP_md5();
		break;
	case CCP_AUTH_ALGO_SHA1_HMAC:
		*algo = EVP_sha1();
		break;
	case CCP_AUTH_ALGO_SHA224_HMAC:
		*algo = EVP_sha224();
		break;
	case CCP_AUTH_ALGO_SHA256_HMAC:
		*algo = EVP_sha256();
		break;
	case CCP_AUTH_ALGO_SHA384_HMAC:
		*algo = EVP_sha384();
		break;
	case CCP_AUTH_ALGO_SHA512_HMAC:
		*algo = EVP_sha512();
		break;
	default:
		res = -EINVAL;
		break;
	}
	return res;
}

/**
 * Compute an HMAC over src on the CPU via the OpenSSL EVP_DigestSign
 * API and copy d_len bytes of the digest to dst.
 *
 * @return 0 on success, -EINVAL on any OpenSSL failure.
 */
static int
process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
		      __rte_unused uint8_t *iv,
		      EVP_PKEY *pkey,
		      int srclen,
		      EVP_MD_CTX *ctx,
		      const EVP_MD *algo,
		      uint16_t d_len)
{
	size_t dstlen;
	/* 64 bytes fits the largest supported digest (SHA-512) */
	unsigned char temp_dst[64];

	if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
		goto process_auth_err;

	if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
		goto process_auth_err;

	if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
		goto process_auth_err;

memcpy(dst, temp_dst, d_len); 1464 return 0; 1465 process_auth_err: 1466 CCP_LOG_ERR("Process cpu auth failed"); 1467 return -EINVAL; 1468 } 1469 1470 static int cpu_crypto_auth(struct ccp_qp *qp, 1471 struct rte_crypto_op *op, 1472 struct ccp_session *sess, 1473 EVP_MD_CTX *ctx) 1474 { 1475 uint8_t *src, *dst; 1476 int srclen, status; 1477 struct rte_mbuf *mbuf_src, *mbuf_dst; 1478 const EVP_MD *algo = NULL; 1479 EVP_PKEY *pkey; 1480 1481 algo_select(sess->auth.algo, &algo); 1482 pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key, 1483 sess->auth.key_length); 1484 mbuf_src = op->sym->m_src; 1485 mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src; 1486 srclen = op->sym->auth.data.length; 1487 src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *, 1488 op->sym->auth.data.offset); 1489 1490 if (sess->auth.op == CCP_AUTH_OP_VERIFY) { 1491 dst = qp->temp_digest; 1492 } else { 1493 dst = op->sym->auth.digest.data; 1494 if (dst == NULL) { 1495 dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *, 1496 op->sym->auth.data.offset + 1497 sess->auth.digest_length); 1498 } 1499 } 1500 status = process_cpu_auth_hmac(src, dst, NULL, 1501 pkey, srclen, 1502 ctx, 1503 algo, 1504 sess->auth.digest_length); 1505 if (status) { 1506 op->status = RTE_CRYPTO_OP_STATUS_ERROR; 1507 return status; 1508 } 1509 1510 if (sess->auth.op == CCP_AUTH_OP_VERIFY) { 1511 if (memcmp(dst, op->sym->auth.digest.data, 1512 sess->auth.digest_length) != 0) { 1513 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED; 1514 } else { 1515 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS; 1516 } 1517 } else { 1518 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS; 1519 } 1520 EVP_PKEY_free(pkey); 1521 return 0; 1522 } 1523 1524 static void 1525 ccp_perform_passthru(struct ccp_passthru *pst, 1526 struct ccp_queue *cmd_q) 1527 { 1528 struct ccp_desc *desc; 1529 union ccp_function function; 1530 1531 desc = &cmd_q->qbase_desc[cmd_q->qidx]; 1532 1533 CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU; 1534 1535 CCP_CMD_SOC(desc) 
= 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 0;
	CCP_CMD_EOM(desc) = 0;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_PT_BYTESWAP(&function) = pst->byte_swap;
	CCP_PT_BITWISE(&function) = pst->bit_mod;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = pst->len;

	if (pst->dir) {
		/* system memory -> LSB */
		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
		CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
		CCP_CMD_DST_HI(desc) = 0;
		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

		if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
			CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
	} else {
		/* LSB -> system memory */
		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
		CCP_CMD_SRC_HI(desc) = 0;
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;

		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
		CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
	}

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
}

/**
 * Program the descriptor sequence for an HMAC on the CCP SHA engine:
 * load PHash1 (ipad partial hash) to LSB, hash the message, retrieve
 * the intermediate hash, load PHash2 (opad partial hash), hash the
 * intermediate hash, then copy the final HMAC to the mbuf tail.
 *
 * @return 0; op->status is left NOT_PROCESSED until completion polling.
 */
static int
ccp_perform_hmac(struct rte_crypto_op *op,
		 struct ccp_queue *cmd_q)
{

	struct ccp_session *session;
	union ccp_function function;
	struct ccp_desc *desc;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr, dest_addr_t;
	struct ccp_passthru pst;
	uint64_t auth_msg_bits;
	void *append_ptr;
	uint8_t *addr;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);
	addr = session->auth.pre_compute;

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->auth.data.offset);
	/* NOTE(review): append_ptr is not checked for NULL here (the
	 * SHA3 variants do check) — verify mbuf tailroom upstream.
	 */
	append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
						session->auth.ctx_len);
	if (iommu_mode == 2) {
		dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
		pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
	} else {
		dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
		pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
	}
	dest_addr_t = dest_addr;

	/** Load PHash1 to LSB*/
	pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
	pst.len = session->auth.ctx_len;
	pst.dir = 1;
	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
	ccp_perform_passthru(&pst, cmd_q);

	/**sha engine command descriptor for IntermediateHash*/

	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->auth.data.length;
	/* message length includes the already-hashed ipad block */
	auth_msg_bits = (op->sym->auth.data.length +
			 session->auth.block_size) * 8;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
	CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
	CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();

	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Intermediate Hash value retrieve */
	if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
		/* 384/512 digests span two LSB slots */
		pst.src_addr =
			(phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

	} else {
		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = session->auth.ctx_len;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

	}

	/** Load PHash2 to LSB*/
	addr += session->auth.ctx_len;
	if (iommu_mode == 2)
		pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
	else
		pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)addr);
	pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
	pst.len = session->auth.ctx_len;
	pst.dir = 1;
	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
	ccp_perform_passthru(&pst, cmd_q);

	/**sha engine command descriptor for FinalHash*/
	dest_addr_t += session->auth.offset;

	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = (session->auth.ctx_len -
			     session->auth.offset);
	auth_msg_bits = (session->auth.block_size +
			 session->auth.ctx_len -
			 session->auth.offset) * 8;

	CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
	CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
	CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
	CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();

	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Retrieve hmac output */
	pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
	pst.dest_addr = dest_addr;
	pst.len = session->auth.ctx_len;
	pst.dir = 0;
	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
	if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
	else
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
	ccp_perform_passthru(&pst, cmd_q);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;

}

/**
 * Program a plain SHA digest: load the init context to LSB, run the
 * SHA engine over the payload, then copy the digest to the mbuf tail.
 *
 * @return 0; op->status is left NOT_PROCESSED until completion polling.
 */
static int
ccp_perform_sha(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	union ccp_function function;
	struct ccp_desc *desc;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr;
	struct ccp_passthru pst;
	void *append_ptr;
	uint64_t auth_msg_bits;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->auth.data.offset);
	/* NOTE(review): append_ptr is not checked for NULL here —
	 * verify mbuf tailroom upstream.
	 */
	append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
						session->auth.ctx_len);
	if (iommu_mode == 2) {
		dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
		/* NOTE(review): sha_ctx is cast directly to an address
		 * rather than translated via rte_mem_virt2iova — looks
		 * intentional (sha_ctx setup not visible here); confirm
		 * against its allocation site.
		 */
		pst.src_addr = (phys_addr_t)sha_ctx;
	} else {
		dest_addr = (phys_addr_t)rte_mem_virt2phy(append_ptr);
		pst.src_addr = (phys_addr_t)rte_mem_virt2phy((void *)
						session->auth.ctx);
	}

	/** Passthru sha context*/

	pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
	pst.len = session->auth.ctx_len;
	pst.dir = 1;
	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
	ccp_perform_passthru(&pst, cmd_q);

	/**prepare sha command descriptor*/

	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->auth.data.length;
	auth_msg_bits = op->sym->auth.data.length * 8;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
	CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
	CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();

	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Hash value retrieve */
	pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
	pst.dest_addr = dest_addr;
	pst.len = session->auth.ctx_len;
	pst.dir = 0;
	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
	if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
	else
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
	ccp_perform_passthru(&pst, cmd_q);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;

}

/**
 * Program an HMAC over SHA3: run the inner hash keyed by the ipad
 * precompute (passed via the KEY pointer), retrieve the intermediate
 * hash from LSB, then run the outer hash keyed by the opad precompute.
 *
 * @return 0 on success, -1 if the mbuf append fails.
 */
static int
ccp_perform_sha3_hmac(struct rte_crypto_op *op,
		      struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	struct ccp_passthru pst;
	union ccp_function function;
	struct ccp_desc *desc;
	uint8_t *append_ptr;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->auth.data.offset);
	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
						session->auth.ctx_len);
	if (!append_ptr) {
		CCP_LOG_ERR("CCP MBUF append failed\n");
		return -1;
	}
	if (iommu_mode == 2) {
		dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
		ctx_paddr = (phys_addr_t)rte_mem_virt2iova(
					session->auth.pre_compute);
	} else {
		dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
		ctx_paddr = (phys_addr_t)rte_mem_virt2phy(
					session->auth.pre_compute);
	}
	/* second half of the appended area holds the intermediate hash */
	dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	/*desc1 for SHA3-Ihash operation */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;
	CCP_CMD_LEN(desc) = op->sym->auth.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
	CCP_CMD_DST_HI(desc) = 0;
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Intermediate Hash value retrieve */
	if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {

		pst.src_addr =
			(phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

	} else {
		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	/**sha engine command descriptor for FinalHash*/
	ctx_paddr += CCP_SHA3_CTX_SIZE;	/* advance to the opad precompute */
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
CCP_CMD_INIT(desc) = 1; 1955 CCP_CMD_EOM(desc) = 1; 1956 1957 function.raw = 0; 1958 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type; 1959 CCP_CMD_FUNCTION(desc) = function.raw; 1960 1961 if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) { 1962 dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE); 1963 CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE; 1964 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) { 1965 CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE; 1966 } else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) { 1967 dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE); 1968 CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE; 1969 } else { 1970 CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE; 1971 } 1972 1973 CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t); 1974 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t); 1975 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM; 1976 1977 CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr; 1978 CCP_CMD_DST_HI(desc) = high32_value(dest_addr); 1979 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM; 1980 1981 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr); 1982 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr); 1983 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM; 1984 1985 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE; 1986 1987 rte_wmb(); 1988 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE); 1989 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail); 1990 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE, 1991 cmd_q->qcontrol | CMD_Q_RUN); 1992 1993 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED; 1994 return 0; 1995 } 1996 1997 static int 1998 ccp_perform_sha3(struct rte_crypto_op *op, 1999 struct ccp_queue *cmd_q) 2000 { 2001 struct ccp_session *session; 2002 union ccp_function function; 2003 struct ccp_desc *desc; 2004 uint8_t *ctx_addr = NULL, *append_ptr = NULL; 2005 uint32_t tail; 2006 phys_addr_t src_addr, dest_addr, ctx_paddr; 2007 2008 session = (struct ccp_session *)get_sym_session_private_data( 2009 op->sym->session, 2010 
ccp_cryptodev_driver_id); 2011 2012 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src, 2013 op->sym->auth.data.offset); 2014 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src, 2015 session->auth.ctx_len); 2016 if (!append_ptr) { 2017 CCP_LOG_ERR("CCP MBUF append failed\n"); 2018 return -1; 2019 } 2020 if (iommu_mode == 2) { 2021 dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr); 2022 ctx_paddr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr); 2023 } else { 2024 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr); 2025 ctx_paddr = (phys_addr_t)rte_mem_virt2phy((void *)ctx_addr); 2026 } 2027 2028 ctx_addr = session->auth.sha3_ctx; 2029 2030 desc = &cmd_q->qbase_desc[cmd_q->qidx]; 2031 memset(desc, 0, Q_DESC_SIZE); 2032 2033 /* prepare desc for SHA3 operation */ 2034 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA; 2035 CCP_CMD_INIT(desc) = 1; 2036 CCP_CMD_EOM(desc) = 1; 2037 2038 function.raw = 0; 2039 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type; 2040 CCP_CMD_FUNCTION(desc) = function.raw; 2041 2042 CCP_CMD_LEN(desc) = op->sym->auth.data.length; 2043 2044 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr); 2045 CCP_CMD_SRC_HI(desc) = high32_value(src_addr); 2046 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2047 2048 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr); 2049 CCP_CMD_DST_HI(desc) = high32_value(dest_addr); 2050 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2051 2052 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr); 2053 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr); 2054 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2055 2056 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE; 2057 2058 rte_wmb(); 2059 2060 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE); 2061 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail); 2062 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE, 2063 cmd_q->qcontrol | CMD_Q_RUN); 2064 2065 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED; 2066 return 0; 2067 } 2068 2069 static int 2070 
ccp_perform_aes_cmac(struct rte_crypto_op *op,
		     struct ccp_queue *cmd_q)
{
	/*
	 * Program AES-CMAC on the CCP AES engine. Block-aligned input
	 * is processed in a single descriptor; unaligned input is split
	 * into an aligned part and one padded final block (CMAC K2
	 * subkey path), each in its own descriptor.
	 */
	struct ccp_session *session;
	union ccp_function function;
	struct ccp_passthru pst;
	struct ccp_desc *desc;
	uint32_t tail;
	uint8_t *src_tb, *append_ptr, *ctx_addr;
	phys_addr_t src_addr, dest_addr, key_addr;
	int length, non_align_len;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);
	/* NOTE(review): key_addr and dest_addr use rte_mem_virt2phy()
	 * unconditionally, unlike the sibling functions which switch on
	 * iommu_mode — verify behavior when iommu_mode == 2.
	 */
	key_addr = rte_mem_virt2phy(session->auth.key_ccp);

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->auth.data.offset);
	/* NOTE(review): append_ptr is not checked for NULL here —
	 * verify mbuf tailroom upstream.
	 */
	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
						session->auth.ctx_len);
	dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);

	function.raw = 0;
	CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
	CCP_AES_MODE(&function) = session->auth.um.aes_mode;
	CCP_AES_TYPE(&function) = session->auth.ut.aes_type;

	if (op->sym->auth.data.length % session->auth.block_size == 0) {
		/* aligned case: zero IV, single descriptor */
		ctx_addr = session->auth.pre_compute;
		memset(ctx_addr, 0, AES_BLOCK_SIZE);
		if (iommu_mode == 2)
			pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
					(void *)ctx_addr);
		else
			pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
					(void *)ctx_addr);

		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
		ccp_perform_passthru(&pst, cmd_q);

		desc = &cmd_q->qbase_desc[cmd_q->qidx];
		memset(desc, 0, Q_DESC_SIZE);

		/* prepare desc for aes-cmac command */
		CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
		CCP_CMD_EOM(desc) = 1;
		CCP_CMD_FUNCTION(desc) = function.raw;

		CCP_CMD_LEN(desc) = op->sym->auth.data.length;
		CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
		CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

		CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
		CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
		CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

		cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

		rte_wmb();

		tail =
		(uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
			      cmd_q->qcontrol | CMD_Q_RUN);
	} else {
		/* unaligned case: IV comes from the K2 subkey slot */
		ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
		memset(ctx_addr, 0, AES_BLOCK_SIZE);
		if (iommu_mode == 2)
			pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
					(void *)ctx_addr);
		else
			pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
					(void *)ctx_addr);
		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
		ccp_perform_passthru(&pst, cmd_q);

		length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
		length *= AES_BLOCK_SIZE;
		non_align_len = op->sym->auth.data.length - length;
		/* prepare desc for aes-cmac command */
		/*Command 1*/
		desc = &cmd_q->qbase_desc[cmd_q->qidx];
		memset(desc, 0, Q_DESC_SIZE);

		CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
		CCP_CMD_INIT(desc) = 1;
		CCP_CMD_FUNCTION(desc) = function.raw;

		CCP_CMD_LEN(desc) = length;
		CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
		CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

		CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
		CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
		CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

		cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

		/*Command 2*/
		/* stage the padded final block in the appended area;
		 * non_align_len < AES_BLOCK_SIZE so the pad byte fits
		 */
		append_ptr = append_ptr + CCP_SB_BYTES;
		memset(append_ptr, 0, AES_BLOCK_SIZE);
		src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
						 uint8_t *,
						 op->sym->auth.data.offset +
						 length);
		rte_memcpy(append_ptr, src_tb, non_align_len);
		append_ptr[non_align_len] = CMAC_PAD_VALUE;

		desc = &cmd_q->qbase_desc[cmd_q->qidx];
		memset(desc, 0, Q_DESC_SIZE);

		CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
		CCP_CMD_EOM(desc) = 1;
		CCP_CMD_FUNCTION(desc) = function.raw;
		CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;

		CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
		CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

		CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
		CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
		CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

		cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

		rte_wmb();
		tail =
		(uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
			      cmd_q->qcontrol | CMD_Q_RUN);
	}
	/* Retrieve result */
	pst.dest_addr = dest_addr;
	pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
	pst.len = CCP_SB_BYTES;
	pst.dir = 0;
	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
	ccp_perform_passthru(&pst, cmd_q);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}

static int
ccp_perform_aes(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	uint8_t *lsb_buf;
	struct ccp_passthru pst = {0};
	struct ccp_desc *desc
	phys_addr_t src_addr, dest_addr, key_addr;
	uint8_t *iv;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);
	function.raw = 0;

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
		/* Non-ECB modes need an IV loaded into the queue's LSB
		 * IV slot via a passthru command before the AES command.
		 */
		if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
			/* CTR: IV goes after the session nonce prefix */
			rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
				   iv, session->iv.length);
			pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
			CCP_AES_SIZE(&function) = 0x1F;
		} else {
			/* CBC: right-align the IV in a per-batch LSB
			 * staging slot.
			 */
			lsb_buf =
			&(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
			rte_memcpy(lsb_buf +
				   (CCP_SB_BYTES - session->iv.length),
				   iv, session->iv.length);
			pst.src_addr = b_info->lsb_buf_phys +
				(b_info->lsb_buf_idx * CCP_SB_BYTES);
			b_info->lsb_buf_idx++;
		}

		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->cipher.data.offset);
	/* out-of-place if a distinct destination mbuf was supplied */
	if (likely(op->sym->m_dst != NULL))
		dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;
	key_addr = session->cipher.key_phys;

	/* prepare desc for aes command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	CCP_AES_ENCRYPT(&function) = session->cipher.dir;
	CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
	CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	/* Note: no doorbell here — the batch enqueue path rings the
	 * tail register once for the whole batch.
	 */
	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}

/* Enqueue one 3DES descriptor (CBC only); IV is staged through a
 * passthru into the queue's LSB IV slot.  Rings the doorbell itself.
 */
static int
ccp_perform_3des(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	unsigned char *lsb_buf;
	struct ccp_passthru pst;
	struct ccp_desc *desc;
	uint32_t tail;
	uint8_t *iv;
	phys_addr_t src_addr, dest_addr, key_addr;

	session = (struct ccp_session *)get_sym_session_private_data(
					op->sym->session,
					ccp_cryptodev_driver_id);

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	switch (session->cipher.um.des_mode) {
	case CCP_DES_MODE_CBC:
		/* right-align IV in a batch LSB staging slot, then DMA
		 * it into the queue's IV slot
		 */
		lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
		b_info->lsb_buf_idx++;

		rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
			   iv, session->iv.length);
		if (iommu_mode == 2)
			pst.src_addr = (phys_addr_t)rte_mem_virt2iova(
					(void *) lsb_buf);
		else
			pst.src_addr = (phys_addr_t)rte_mem_virt2phy(
					(void *) lsb_buf);
		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
		break;
	case CCP_DES_MODE_CFB:
	case CCP_DES_MODE_ECB:
		CCP_LOG_ERR("Unsupported DES cipher mode");
		return -ENOTSUP;
	}

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->cipher.data.offset);
	if (unlikely(op->sym->m_dst != NULL))
		dest_addr =
			rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;

	if (iommu_mode == 2)
		key_addr = rte_mem_virt2iova(session->cipher.key_ccp);
	else
		key_addr = rte_mem_virt2phy(session->cipher.key_ccp);

	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	memset(desc, 0, Q_DESC_SIZE);

	/* prepare desc for des command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_DES_ENCRYPT(&function) = session->cipher.dir;
	CCP_DES_MODE(&function) = session->cipher.um.des_mode;
	CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* non-zero des_mode implies an IV is in use — presumably only
	 * CBC reaches here; other modes returned above
	 */
	if (session->cipher.um.des_mode)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	/* descriptor must be visible before the tail doorbell */
	rte_wmb();

	/* Write the new tail address back to the queue register */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	/* Turn the queue back on using our cached control register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}

/* Enqueue the five-command AES-GCM sequence:
 * IV passthru, GHASH-AAD, GCTR payload, IV re-load, GHASH-final.
 */
static int
ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	union ccp_function function;
	uint8_t *iv;
	struct ccp_passthru pst;
	struct ccp_desc *desc;
	uint32_t tail;
	uint64_t *temp;
	phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
	phys_addr_t digest_dest_addr;
	int length, non_align_len;

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					 ccp_cryptodev_driver_id);
	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	key_addr = session->cipher.key_phys;

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->aead.data.offset);
	if (unlikely(op->sym->m_dst != NULL))
		dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->aead.data.offset);
	else
		dest_addr = src_addr;
	/* NOTE(review): rte_pktmbuf_append() return value is unchecked
	 * here, unlike the CMAC/SHA3 paths — verify tailroom is always
	 * sufficient for ctx_len on this path.
	 */
	rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
	digest_dest_addr = op->sym->aead.digest.phys_addr;
	/* stage the GHASH length block (bit lengths, big-endian) right
	 * after the digest area
	 */
	temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
	*temp++ = rte_bswap64(session->auth.aad_length << 3);
	*temp = rte_bswap64(op->sym->aead.data.length << 3);

	non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
	length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);

	aad_addr = op->sym->aead.aad.phys_addr;

	/* CMD1 IV Passthru */
	rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
		   session->iv.length);
	pst.src_addr = session->cipher.nonce_phys;
	pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
	pst.len = CCP_SB_BYTES;
	pst.dir = 1;
pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP; 2475 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP; 2476 ccp_perform_passthru(&pst, cmd_q); 2477 2478 /* CMD2 GHASH-AAD */ 2479 function.raw = 0; 2480 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD; 2481 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH; 2482 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type; 2483 2484 desc = &cmd_q->qbase_desc[cmd_q->qidx]; 2485 memset(desc, 0, Q_DESC_SIZE); 2486 2487 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES; 2488 CCP_CMD_INIT(desc) = 1; 2489 CCP_CMD_FUNCTION(desc) = function.raw; 2490 2491 CCP_CMD_LEN(desc) = session->auth.aad_length; 2492 2493 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr); 2494 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr); 2495 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2496 2497 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr); 2498 CCP_CMD_KEY_HI(desc) = high32_value(key_addr); 2499 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2500 2501 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv; 2502 2503 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE; 2504 rte_wmb(); 2505 2506 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE); 2507 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail); 2508 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE, 2509 cmd_q->qcontrol | CMD_Q_RUN); 2510 2511 /* CMD3 : GCTR Plain text */ 2512 function.raw = 0; 2513 CCP_AES_ENCRYPT(&function) = session->cipher.dir; 2514 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR; 2515 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type; 2516 if (non_align_len == 0) 2517 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1; 2518 else 2519 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1; 2520 2521 2522 desc = &cmd_q->qbase_desc[cmd_q->qidx]; 2523 memset(desc, 0, Q_DESC_SIZE); 2524 2525 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES; 2526 CCP_CMD_EOM(desc) = 1; 2527 CCP_CMD_FUNCTION(desc) = function.raw; 2528 2529 CCP_CMD_LEN(desc) = length; 2530 2531 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr); 2532 
CCP_CMD_SRC_HI(desc) = high32_value(src_addr); 2533 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2534 2535 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr); 2536 CCP_CMD_DST_HI(desc) = high32_value(dest_addr); 2537 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2538 2539 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr); 2540 CCP_CMD_KEY_HI(desc) = high32_value(key_addr); 2541 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2542 2543 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv; 2544 2545 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE; 2546 rte_wmb(); 2547 2548 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE); 2549 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail); 2550 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE, 2551 cmd_q->qcontrol | CMD_Q_RUN); 2552 2553 /* CMD4 : PT to copy IV */ 2554 pst.src_addr = session->cipher.nonce_phys; 2555 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES); 2556 pst.len = AES_BLOCK_SIZE; 2557 pst.dir = 1; 2558 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP; 2559 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP; 2560 ccp_perform_passthru(&pst, cmd_q); 2561 2562 /* CMD5 : GHASH-Final */ 2563 function.raw = 0; 2564 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL; 2565 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH; 2566 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type; 2567 2568 desc = &cmd_q->qbase_desc[cmd_q->qidx]; 2569 memset(desc, 0, Q_DESC_SIZE); 2570 2571 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES; 2572 CCP_CMD_FUNCTION(desc) = function.raw; 2573 /* Last block (AAD_len || PT_len)*/ 2574 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE; 2575 2576 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE); 2577 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE); 2578 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2579 2580 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr); 2581 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr); 2582 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2583 2584 
CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr); 2585 CCP_CMD_KEY_HI(desc) = high32_value(key_addr); 2586 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM; 2587 2588 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv; 2589 2590 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE; 2591 rte_wmb(); 2592 2593 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE); 2594 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail); 2595 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE, 2596 cmd_q->qcontrol | CMD_Q_RUN); 2597 2598 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED; 2599 return 0; 2600 } 2601 2602 static inline int 2603 ccp_crypto_cipher(struct rte_crypto_op *op, 2604 struct ccp_queue *cmd_q, 2605 struct ccp_batch_info *b_info) 2606 { 2607 int result = 0; 2608 struct ccp_session *session; 2609 2610 session = (struct ccp_session *)get_sym_session_private_data( 2611 op->sym->session, 2612 ccp_cryptodev_driver_id); 2613 2614 switch (session->cipher.algo) { 2615 case CCP_CIPHER_ALGO_AES_CBC: 2616 result = ccp_perform_aes(op, cmd_q, b_info); 2617 b_info->desccnt += 2; 2618 break; 2619 case CCP_CIPHER_ALGO_AES_CTR: 2620 result = ccp_perform_aes(op, cmd_q, b_info); 2621 b_info->desccnt += 2; 2622 break; 2623 case CCP_CIPHER_ALGO_AES_ECB: 2624 result = ccp_perform_aes(op, cmd_q, b_info); 2625 b_info->desccnt += 1; 2626 break; 2627 case CCP_CIPHER_ALGO_3DES_CBC: 2628 result = ccp_perform_3des(op, cmd_q, b_info); 2629 b_info->desccnt += 2; 2630 break; 2631 default: 2632 CCP_LOG_ERR("Unsupported cipher algo %d", 2633 session->cipher.algo); 2634 return -ENOTSUP; 2635 } 2636 return result; 2637 } 2638 2639 static inline int 2640 ccp_crypto_auth(struct rte_crypto_op *op, 2641 struct ccp_queue *cmd_q, 2642 struct ccp_batch_info *b_info) 2643 { 2644 2645 int result = 0; 2646 struct ccp_session *session; 2647 2648 session = (struct ccp_session *)get_sym_session_private_data( 2649 op->sym->session, 2650 ccp_cryptodev_driver_id); 2651 2652 switch (session->auth.algo) { 2653 case 
CCP_AUTH_ALGO_SHA1: 2654 case CCP_AUTH_ALGO_SHA224: 2655 case CCP_AUTH_ALGO_SHA256: 2656 case CCP_AUTH_ALGO_SHA384: 2657 case CCP_AUTH_ALGO_SHA512: 2658 result = ccp_perform_sha(op, cmd_q); 2659 b_info->desccnt += 3; 2660 break; 2661 case CCP_AUTH_ALGO_MD5_HMAC: 2662 if (session->auth_opt == 0) 2663 result = -1; 2664 break; 2665 case CCP_AUTH_ALGO_SHA1_HMAC: 2666 case CCP_AUTH_ALGO_SHA224_HMAC: 2667 case CCP_AUTH_ALGO_SHA256_HMAC: 2668 if (session->auth_opt == 0) { 2669 result = ccp_perform_hmac(op, cmd_q); 2670 b_info->desccnt += 6; 2671 } 2672 break; 2673 case CCP_AUTH_ALGO_SHA384_HMAC: 2674 case CCP_AUTH_ALGO_SHA512_HMAC: 2675 if (session->auth_opt == 0) { 2676 result = ccp_perform_hmac(op, cmd_q); 2677 b_info->desccnt += 7; 2678 } 2679 break; 2680 case CCP_AUTH_ALGO_SHA3_224: 2681 case CCP_AUTH_ALGO_SHA3_256: 2682 case CCP_AUTH_ALGO_SHA3_384: 2683 case CCP_AUTH_ALGO_SHA3_512: 2684 result = ccp_perform_sha3(op, cmd_q); 2685 b_info->desccnt += 1; 2686 break; 2687 case CCP_AUTH_ALGO_SHA3_224_HMAC: 2688 case CCP_AUTH_ALGO_SHA3_256_HMAC: 2689 result = ccp_perform_sha3_hmac(op, cmd_q); 2690 b_info->desccnt += 3; 2691 break; 2692 case CCP_AUTH_ALGO_SHA3_384_HMAC: 2693 case CCP_AUTH_ALGO_SHA3_512_HMAC: 2694 result = ccp_perform_sha3_hmac(op, cmd_q); 2695 b_info->desccnt += 4; 2696 break; 2697 case CCP_AUTH_ALGO_AES_CMAC: 2698 result = ccp_perform_aes_cmac(op, cmd_q); 2699 b_info->desccnt += 4; 2700 break; 2701 default: 2702 CCP_LOG_ERR("Unsupported auth algo %d", 2703 session->auth.algo); 2704 return -ENOTSUP; 2705 } 2706 2707 return result; 2708 } 2709 2710 static inline int 2711 ccp_crypto_aead(struct rte_crypto_op *op, 2712 struct ccp_queue *cmd_q, 2713 struct ccp_batch_info *b_info) 2714 { 2715 int result = 0; 2716 struct ccp_session *session; 2717 2718 session = (struct ccp_session *)get_sym_session_private_data( 2719 op->sym->session, 2720 ccp_cryptodev_driver_id); 2721 2722 switch (session->auth.algo) { 2723 case CCP_AUTH_ALGO_AES_GCM: 2724 if 
(session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) { 2725 CCP_LOG_ERR("Incorrect chain order"); 2726 return -1; 2727 } 2728 result = ccp_perform_aes_gcm(op, cmd_q); 2729 b_info->desccnt += 5; 2730 break; 2731 default: 2732 CCP_LOG_ERR("Unsupported aead algo %d", 2733 session->aead_algo); 2734 return -ENOTSUP; 2735 } 2736 return result; 2737 } 2738 2739 int 2740 process_ops_to_enqueue(struct ccp_qp *qp, 2741 struct rte_crypto_op **op, 2742 struct ccp_queue *cmd_q, 2743 uint16_t nb_ops, 2744 uint16_t total_nb_ops, 2745 int slots_req, 2746 uint16_t b_idx) 2747 { 2748 int i, result = 0; 2749 struct ccp_batch_info *b_info; 2750 struct ccp_session *session; 2751 EVP_MD_CTX *auth_ctx = NULL; 2752 2753 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) { 2754 CCP_LOG_ERR("batch info allocation failed"); 2755 return 0; 2756 } 2757 2758 auth_ctx = EVP_MD_CTX_create(); 2759 if (unlikely(!auth_ctx)) { 2760 CCP_LOG_ERR("Unable to create auth ctx"); 2761 return 0; 2762 } 2763 b_info->auth_ctr = 0; 2764 2765 /* populate batch info necessary for dequeue */ 2766 b_info->op_idx = 0; 2767 b_info->b_idx = 0; 2768 b_info->lsb_buf_idx = 0; 2769 b_info->desccnt = 0; 2770 b_info->cmd_q = cmd_q; 2771 if (iommu_mode == 2) 2772 b_info->lsb_buf_phys = 2773 (phys_addr_t)rte_mem_virt2iova((void *)b_info->lsb_buf); 2774 else 2775 b_info->lsb_buf_phys = 2776 (phys_addr_t)rte_mem_virt2phy((void *)b_info->lsb_buf); 2777 2778 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req); 2779 2780 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * 2781 Q_DESC_SIZE); 2782 for (i = b_idx; i < (nb_ops+b_idx); i++) { 2783 session = (struct ccp_session *)get_sym_session_private_data( 2784 op[i]->sym->session, 2785 ccp_cryptodev_driver_id); 2786 switch (session->cmd_id) { 2787 case CCP_CMD_CIPHER: 2788 result = ccp_crypto_cipher(op[i], cmd_q, b_info); 2789 break; 2790 case CCP_CMD_AUTH: 2791 if (session->auth_opt) { 2792 b_info->auth_ctr++; 2793 result = cpu_crypto_auth(qp, op[i], 2794 
							 session, auth_ctx);
			} else
				result = ccp_crypto_auth(op[i], cmd_q, b_info);
			break;
		case CCP_CMD_CIPHER_HASH:
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
			if (result)
				break;
			result = ccp_crypto_auth(op[i], cmd_q, b_info);
			break;
		case CCP_CMD_HASH_CIPHER:
			if (session->auth_opt) {
				result = cpu_crypto_auth(qp, op[i],
							 session, auth_ctx);
				if (op[i]->status !=
				    RTE_CRYPTO_OP_STATUS_SUCCESS)
					CCP_LOG_ERR("RTE_CRYPTO_OP_STATUS_AUTH_FAILED");
			} else
				result = ccp_crypto_auth(op[i], cmd_q, b_info);

			if (result)
				break;
			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
			break;
		case CCP_CMD_COMBINED:
			result = ccp_crypto_aead(op[i], cmd_q, b_info);
			break;
		default:
			CCP_LOG_ERR("Unsupported cmd_id");
			result = -1;
		}
		if (unlikely(result < 0)) {
			/* give back the slots we reserved but did not use */
			rte_atomic64_add(&b_info->cmd_q->free_slots,
					 (slots_req - b_info->desccnt));
			break;
		}
		b_info->op[i] = op[i];
	}

	b_info->opcnt = i;
	b_info->b_idx = b_idx;
	b_info->total_nb_ops = total_nb_ops;
	b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
					 Q_DESC_SIZE);

	/* descriptors must be globally visible before the doorbell */
	rte_wmb();
	/* Write the new tail address back to the queue register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
		      b_info->tail_offset);
	/* Turn the queue back on using our cached control register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	rte_ring_enqueue(qp->processed_pkts, (void *)b_info);

	EVP_MD_CTX_destroy(auth_ctx);
	return i-b_idx;
}

/* Post-process one completed auth/AEAD op: locate the digest the CCP
 * wrote into the appended mbuf tailroom, byte-swap it on the host when
 * the engine emits it reversed (non SHA1/224/256 SHA-engine types),
 * then verify or copy it out, and trim the scratch area from m_src.
 */
static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
{
	struct ccp_session *session;
	uint8_t *digest_data, *addr;
	struct rte_mbuf *m_last;
	int offset, digest_offset;
	uint8_t digest_le[64];

	session = (struct ccp_session *)get_sym_session_private_data(
					 op->sym->session,
					ccp_cryptodev_driver_id);

	if (session->cmd_id == CCP_CMD_COMBINED) {
		digest_data = op->sym->aead.digest.data;
		digest_offset = op->sym->aead.data.offset +
					op->sym->aead.data.length;
	} else {
		digest_data = op->sym->auth.digest.data;
		digest_offset = op->sym->auth.data.offset +
					op->sym->auth.data.length;
	}
	/* hardware wrote its context/digest into the tailroom that was
	 * appended at enqueue; it lives at the end of the last segment
	 */
	m_last = rte_pktmbuf_lastseg(op->sym->m_src);
	addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
			   m_last->data_len - session->auth.ctx_len);

	rte_mb();
	offset = session->auth.offset;

	if (session->auth.engine == CCP_ENGINE_SHA)
		if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
			/* All other algorithms require byte
			 * swap done by host
			 */
			unsigned int i;

			offset = session->auth.ctx_len -
				session->auth.offset - 1;
			for (i = 0; i < session->auth.digest_length; i++)
				digest_le[i] = addr[offset - i];
			offset = 0;
			addr = digest_le;
		}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (session->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(addr + offset, digest_data,
			   session->auth.digest_length) != 0)
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;

	} else {
		/* no digest buffer given: write it into m_dst at the
		 * end of the authenticated region
		 */
		if (unlikely(digest_data == 0))
			digest_data = rte_pktmbuf_mtod_offset(
					op->sym->m_dst, uint8_t *,
					digest_offset);
		rte_memcpy(digest_data, addr + offset,
			   session->auth.digest_length);
	}
	/* Trim area used for digest from mbuf. */
	rte_pktmbuf_trim(op->sym->m_src,
			 session->auth.ctx_len);
}

/* Finalize up to @nb_ops completed ops from @b_info into @op_d:
 * set per-op status and run digest post-processing (or the CPU auth
 * path for auth_opt sessions).  Returns the number of ops produced.
 */
static int
ccp_prepare_ops(struct ccp_qp *qp,
		struct rte_crypto_op **op_d,
		struct ccp_batch_info *b_info,
		uint16_t nb_ops)
{
	int i, min_ops;
	struct ccp_session *session;

	EVP_MD_CTX *auth_ctx = NULL;

	auth_ctx = EVP_MD_CTX_create();
	if (unlikely(!auth_ctx)) {
		CCP_LOG_ERR("Unable to create auth ctx");
		return 0;
	}
	min_ops = RTE_MIN(nb_ops, b_info->opcnt);

	for (i = b_info->b_idx; i < min_ops; i++) {
		op_d[i] = b_info->op[b_info->b_idx + b_info->op_idx++];
		session = (struct ccp_session *)get_sym_session_private_data(
						 op_d[i]->sym->session,
						ccp_cryptodev_driver_id);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			break;
		case CCP_CMD_AUTH:
			if (session->auth_opt == 0)
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_CIPHER_HASH:
			if (session->auth_opt)
				cpu_crypto_auth(qp, op_d[i],
						session, auth_ctx);
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_HASH_CIPHER:
			if (session->auth_opt)
				op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_COMBINED:
			ccp_auth_dq_prepare(op_d[i]);
			break;
		default:
			CCP_LOG_ERR("Unsupported cmd_id");
		}
	}

	EVP_MD_CTX_destroy(auth_ctx);
	b_info->opcnt -= min_ops;
	return min_ops;
}

/* Dequeue completed ops for @qp.  Pops (or resumes) a batch, checks
 * the hardware head pointer against the batch's [head, tail) window —
 * with wraparound handling — to decide whether the whole batch has
 * been consumed, then finalizes ops and releases queue slots and, when
 * the batch is fully drained, the batch-info element itself.
 */
int
process_ops_to_dequeue(struct ccp_qp *qp,
		       struct rte_crypto_op **op,
		       uint16_t nb_ops,
		       uint16_t *total_nb_ops)
{
	struct ccp_batch_info *b_info;
	uint32_t cur_head_offset;

	if (qp->b_info != NULL) {
		/* resume a partially-dequeued batch */
		b_info = qp->b_info;
		if (unlikely(b_info->op_idx > 0))
			goto success;
	} else if (rte_ring_dequeue(qp->processed_pkts,
				    (void **)&b_info))
		return 0;

	/* all ops went through the CPU auth path: nothing to poll */
	if (b_info->auth_ctr == b_info->opcnt)
		goto success;
	*total_nb_ops = b_info->total_nb_ops;
	cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
				       CMD_Q_HEAD_LO_BASE);

	if (b_info->head_offset < b_info->tail_offset) {
		/* batch occupies a contiguous region; still busy while
		 * the HW head is inside [head, tail)
		 */
		if ((cur_head_offset >= b_info->head_offset) &&
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	} else if (b_info->tail_offset != b_info->head_offset) {
		/* batch wraps around the end of the descriptor ring */
		if ((cur_head_offset >= b_info->head_offset) ||
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	}


success:
	*total_nb_ops = b_info->total_nb_ops;
	nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
	rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
	b_info->desccnt = 0;
	if (b_info->opcnt > 0) {
		/* caller asked for fewer ops than the batch holds */
		qp->b_info = b_info;
	} else {
		rte_mempool_put(qp->batch_mp, (void *)b_info);
		qp->b_info = NULL;
	}

	return nb_ops;
}