1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
3 */
4
5 #define OPENSSL_API_COMPAT 0x10100000L
6
7 #include <dirent.h>
8 #include <fcntl.h>
9 #include <stdio.h>
10 #include <string.h>
11 #include <sys/mman.h>
12 #include <sys/queue.h>
13 #include <sys/types.h>
14 #include <unistd.h>
15 #include <openssl/sha.h>
16 #include <openssl/cmac.h> /*sub key apis*/
17 #include <openssl/evp.h> /*sub key apis*/
18
19 #include <rte_hexdump.h>
20 #include <rte_memzone.h>
21 #include <rte_malloc.h>
22 #include <rte_memory.h>
23 #include <rte_spinlock.h>
24 #include <rte_string_fns.h>
25 #include <cryptodev_pmd.h>
26
27 #include "ccp_dev.h"
28 #include "ccp_crypto.h"
29 #include "ccp_pmd_private.h"
30
31 #include <openssl/conf.h>
32 #include <openssl/err.h>
33 #include <openssl/hmac.h>
34
/* SHA initial context values */
/*
 * Standard SHA initial hash words (H constants) for each variant, stored
 * in reverse order (highest H word first, H0 last). NOTE(review): the
 * reversal presumably matches the byte/word order the CCP engine expects
 * when loading a hash context -- confirm against the ctx load path.
 * Unused trailing words (SHA1) are zero-filled.
 */
uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA1_H4, SHA1_H3,
	SHA1_H2, SHA1_H1,
	SHA1_H0, 0x0U,
	0x0U, 0x0U,
};

uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA224_H7, SHA224_H6,
	SHA224_H5, SHA224_H4,
	SHA224_H3, SHA224_H2,
	SHA224_H1, SHA224_H0,
};

uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA256_H7, SHA256_H6,
	SHA256_H5, SHA256_H4,
	SHA256_H3, SHA256_H2,
	SHA256_H1, SHA256_H0,
};

uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA384_H7, SHA384_H6,
	SHA384_H5, SHA384_H4,
	SHA384_H3, SHA384_H2,
	SHA384_H1, SHA384_H0,
};

uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA512_H7, SHA512_H6,
	SHA512_H5, SHA512_H4,
	SHA512_H3, SHA512_H2,
	SHA512_H1, SHA512_H0,
};
70
/* Suffix 64-bit literals with 'L' on non-MSVC compilers; MSVC takes the
 * literal as-is.
 */
#if defined(_MSC_VER)
#define SHA3_CONST(x) x
#else
#define SHA3_CONST(x) x##L
#endif

/** 'Words' here refers to uint64_t */
#define SHA3_KECCAK_SPONGE_WORDS \
	(((1600) / 8) / sizeof(uint64_t))
/*
 * Keccak sponge state used for SHA3 partial-hash computation.
 * The 1600-bit state is 25 x 64-bit words (200 bytes total).
 */
typedef struct sha3_context_ {
	uint64_t saved;
	/**
	 * The portion of the input message that we
	 * didn't consume yet (bytes packed little-endian into one word)
	 */
	union {
		uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
		/* Keccak's state, viewed as 64-bit lanes */
		uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
		/**total 200 ctx size**/
	};
	unsigned int byteIndex;
	/**
	 * 0..7--the next byte after the set one
	 * (starts from 0; 0--none are buffered)
	 */
	unsigned int wordIndex;
	/**
	 * 0..24--the next word to integrate input
	 * (starts from 0)
	 */
	unsigned int capacityWords;
	/**
	 * the double size of the hash output in
	 * words (e.g. 16 for Keccak 512)
	 */
} sha3_context;
108
#ifndef SHA3_ROTL64
/* 64-bit rotate-left; y must be in 1..63 or the right shift is UB */
#define SHA3_ROTL64(x, y) \
	(((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
#endif
113
/* Keccak-f[1600] round constants, XORed into lane (0,0) in the Iota step */
static const uint64_t keccakf_rndc[24] = {
	SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
	SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
	SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
	SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
	SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
	SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
	SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
	SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
	SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
	SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
};

/* per-lane rotation offsets used in the Rho step */
static const unsigned int keccakf_rotc[24] = {
	1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
	18, 39, 61, 20, 44
};

/* lane destination ordering used in the Pi step */
static const unsigned int keccakf_piln[24] = {
	10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
	14, 22, 9, 6, 1
};
138
139 static enum ccp_cmd_order
ccp_get_cmd_id(const struct rte_crypto_sym_xform * xform)140 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
141 {
142 enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
143
144 if (xform == NULL)
145 return res;
146 if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
147 if (xform->next == NULL)
148 return CCP_CMD_AUTH;
149 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
150 return CCP_CMD_HASH_CIPHER;
151 }
152 if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
153 if (xform->next == NULL)
154 return CCP_CMD_CIPHER;
155 else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
156 return CCP_CMD_CIPHER_HASH;
157 }
158 if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
159 return CCP_CMD_COMBINED;
160 return res;
161 }
162
/* partial hash using openssl */
/*
 * Run one SHA1 compression over a single 64-byte block at data_in and
 * copy the intermediate state (h0..h4, SHA_DIGEST_LENGTH bytes) to
 * data_out. NOTE(review): copying straight from SHA_CTX assumes OpenSSL
 * keeps the h-words at the start of the struct -- confirm on upgrades.
 * Returns 0 on success, -EFAULT if init fails.
 */
static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
{
	SHA_CTX ctx;

	if (!SHA1_Init(&ctx))
		return -EFAULT;
	/* Transform consumes exactly one block; no padding is applied */
	SHA1_Transform(&ctx, data_in);
	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
	return 0;
}
174
partial_hash_sha224(uint8_t * data_in,uint8_t * data_out)175 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
176 {
177 SHA256_CTX ctx;
178
179 if (!SHA224_Init(&ctx))
180 return -EFAULT;
181 SHA256_Transform(&ctx, data_in);
182 rte_memcpy(data_out, &ctx,
183 SHA256_DIGEST_LENGTH);
184 return 0;
185 }
186
partial_hash_sha256(uint8_t * data_in,uint8_t * data_out)187 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
188 {
189 SHA256_CTX ctx;
190
191 if (!SHA256_Init(&ctx))
192 return -EFAULT;
193 SHA256_Transform(&ctx, data_in);
194 rte_memcpy(data_out, &ctx,
195 SHA256_DIGEST_LENGTH);
196 return 0;
197 }
198
partial_hash_sha384(uint8_t * data_in,uint8_t * data_out)199 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
200 {
201 SHA512_CTX ctx;
202
203 if (!SHA384_Init(&ctx))
204 return -EFAULT;
205 SHA512_Transform(&ctx, data_in);
206 rte_memcpy(data_out, &ctx,
207 SHA512_DIGEST_LENGTH);
208 return 0;
209 }
210
partial_hash_sha512(uint8_t * data_in,uint8_t * data_out)211 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
212 {
213 SHA512_CTX ctx;
214
215 if (!SHA512_Init(&ctx))
216 return -EFAULT;
217 SHA512_Transform(&ctx, data_in);
218 rte_memcpy(data_out, &ctx,
219 SHA512_DIGEST_LENGTH);
220 return 0;
221 }
222
/*
 * Keccak-f[1600] permutation over the 25-lane sponge state, in place.
 * Applies the five step mappings (Theta, Rho, Pi, Chi, Iota) for each of
 * the 24 rounds.
 */
static void
keccakf(uint64_t s[25])
{
	int i, j, round;
	uint64_t t, bc[5];
#define KECCAK_ROUNDS 24

	for (round = 0; round < KECCAK_ROUNDS; round++) {

		/* Theta: fold column parities back into every lane */
		for (i = 0; i < 5; i++)
			bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
				s[i + 20];

		for (i = 0; i < 5; i++) {
			t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
			for (j = 0; j < 25; j += 5)
				s[j + i] ^= t;
		}

		/* Rho Pi: rotate each lane and move it to its new position */
		t = s[1];
		for (i = 0; i < 24; i++) {
			j = keccakf_piln[i];
			bc[0] = s[j];
			s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
			t = bc[0];
		}

		/* Chi: non-linear mix along each row (bc snapshots the row) */
		for (j = 0; j < 25; j += 5) {
			for (i = 0; i < 5; i++)
				bc[i] = s[j + i];
			for (i = 0; i < 5; i++)
				s[j + i] ^= (~bc[(i + 1) % 5]) &
					    bc[(i + 2) % 5];
		}

		/* Iota: inject the round constant */
		s[0] ^= keccakf_rndc[round];
	}
}
265
266 static void
sha3_Init224(void * priv)267 sha3_Init224(void *priv)
268 {
269 sha3_context *ctx = (sha3_context *) priv;
270
271 memset(ctx, 0, sizeof(*ctx));
272 ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
273 }
274
275 static void
sha3_Init256(void * priv)276 sha3_Init256(void *priv)
277 {
278 sha3_context *ctx = (sha3_context *) priv;
279
280 memset(ctx, 0, sizeof(*ctx));
281 ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
282 }
283
284 static void
sha3_Init384(void * priv)285 sha3_Init384(void *priv)
286 {
287 sha3_context *ctx = (sha3_context *) priv;
288
289 memset(ctx, 0, sizeof(*ctx));
290 ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
291 }
292
293 static void
sha3_Init512(void * priv)294 sha3_Init512(void *priv)
295 {
296 sha3_context *ctx = (sha3_context *) priv;
297
298 memset(ctx, 0, sizeof(*ctx));
299 ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
300 }
301
302
/* Absorb 'len' bytes of message into the sponge state. Bytes that do not
 * complete a 64-bit word are buffered in ctx->saved across calls. No
 * padding is applied here; this is the plain 'update' step.
 */
static void
sha3_Update(void *priv, void const *bufIn, size_t len)
{
	sha3_context *ctx = (sha3_context *) priv;
	/* bytes still needed to complete the word buffered in ctx->saved */
	unsigned int old_tail = (8 - ctx->byteIndex) & 7;
	size_t words;
	unsigned int tail;
	size_t i;
	const uint8_t *buf = bufIn;

	/* too little input to complete the buffered word: just stash it */
	if (len < old_tail) {
		while (len--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				((ctx->byteIndex++) * 8);
		return;
	}

	/* first finish the buffered word and absorb it into the state */
	if (old_tail) {
		len -= old_tail;
		while (old_tail--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				((ctx->byteIndex++) * 8);

		ctx->s[ctx->wordIndex] ^= ctx->saved;
		ctx->byteIndex = 0;
		ctx->saved = 0;
		/* rate block full: run the permutation */
		if (++ctx->wordIndex ==
		   (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* absorb whole 64-bit words directly */
	words = len / sizeof(uint64_t);
	tail = len - words * sizeof(uint64_t);

	for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
		/* assemble the lane little-endian, independent of host order */
		const uint64_t t = (uint64_t) (buf[0]) |
			((uint64_t) (buf[1]) << 8 * 1) |
			((uint64_t) (buf[2]) << 8 * 2) |
			((uint64_t) (buf[3]) << 8 * 3) |
			((uint64_t) (buf[4]) << 8 * 4) |
			((uint64_t) (buf[5]) << 8 * 5) |
			((uint64_t) (buf[6]) << 8 * 6) |
			((uint64_t) (buf[7]) << 8 * 7);
		ctx->s[ctx->wordIndex] ^= t;
		if (++ctx->wordIndex ==
		   (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* buffer any leftover bytes for the next call */
	while (tail--)
		ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
}
363
partial_hash_sha3_224(uint8_t * data_in,uint8_t * data_out)364 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
365 {
366 sha3_context *ctx;
367 int i;
368
369 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
370 if (!ctx) {
371 CCP_LOG_ERR("sha3-ctx creation failed");
372 return -ENOMEM;
373 }
374 sha3_Init224(ctx);
375 sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
376 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
377 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
378 rte_free(ctx);
379
380 return 0;
381 }
382
partial_hash_sha3_256(uint8_t * data_in,uint8_t * data_out)383 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
384 {
385 sha3_context *ctx;
386 int i;
387
388 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
389 if (!ctx) {
390 CCP_LOG_ERR("sha3-ctx creation failed");
391 return -ENOMEM;
392 }
393 sha3_Init256(ctx);
394 sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
395 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
396 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
397 rte_free(ctx);
398
399 return 0;
400 }
401
partial_hash_sha3_384(uint8_t * data_in,uint8_t * data_out)402 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
403 {
404 sha3_context *ctx;
405 int i;
406
407 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
408 if (!ctx) {
409 CCP_LOG_ERR("sha3-ctx creation failed");
410 return -ENOMEM;
411 }
412 sha3_Init384(ctx);
413 sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
414 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
415 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
416 rte_free(ctx);
417
418 return 0;
419 }
420
partial_hash_sha3_512(uint8_t * data_in,uint8_t * data_out)421 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
422 {
423 sha3_context *ctx;
424 int i;
425
426 ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
427 if (!ctx) {
428 CCP_LOG_ERR("sha3-ctx creation failed");
429 return -ENOMEM;
430 }
431 sha3_Init512(ctx);
432 sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
433 for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
434 *data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
435 rte_free(ctx);
436
437 return 0;
438 }
439
generate_partial_hash(struct ccp_session * sess)440 static int generate_partial_hash(struct ccp_session *sess)
441 {
442
443 uint8_t ipad[sess->auth.block_size];
444 uint8_t opad[sess->auth.block_size];
445 uint8_t *ipad_t, *opad_t;
446 uint32_t *hash_value_be32, hash_temp32[8];
447 uint64_t *hash_value_be64, hash_temp64[8];
448 int i, count;
449 uint8_t *hash_value_sha3;
450
451 opad_t = ipad_t = (uint8_t *)sess->auth.key;
452
453 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
454 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);
455
456 /* considering key size is always equal to block size of algorithm */
457 for (i = 0; i < sess->auth.block_size; i++) {
458 ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
459 opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
460 }
461
462 switch (sess->auth.algo) {
463 case CCP_AUTH_ALGO_SHA1_HMAC:
464 count = SHA1_DIGEST_SIZE >> 2;
465
466 if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
467 return -1;
468 for (i = 0; i < count; i++, hash_value_be32++)
469 *hash_value_be32 = hash_temp32[count - 1 - i];
470
471 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
472 + sess->auth.ctx_len);
473 if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
474 return -1;
475 for (i = 0; i < count; i++, hash_value_be32++)
476 *hash_value_be32 = hash_temp32[count - 1 - i];
477 return 0;
478 case CCP_AUTH_ALGO_SHA224_HMAC:
479 count = SHA256_DIGEST_SIZE >> 2;
480
481 if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
482 return -1;
483 for (i = 0; i < count; i++, hash_value_be32++)
484 *hash_value_be32 = hash_temp32[count - 1 - i];
485
486 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
487 + sess->auth.ctx_len);
488 if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
489 return -1;
490 for (i = 0; i < count; i++, hash_value_be32++)
491 *hash_value_be32 = hash_temp32[count - 1 - i];
492 return 0;
493 case CCP_AUTH_ALGO_SHA3_224_HMAC:
494 hash_value_sha3 = sess->auth.pre_compute;
495 if (partial_hash_sha3_224(ipad, hash_value_sha3))
496 return -1;
497
498 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
499 + sess->auth.ctx_len);
500 if (partial_hash_sha3_224(opad, hash_value_sha3))
501 return -1;
502 return 0;
503 case CCP_AUTH_ALGO_SHA256_HMAC:
504 count = SHA256_DIGEST_SIZE >> 2;
505
506 if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
507 return -1;
508 for (i = 0; i < count; i++, hash_value_be32++)
509 *hash_value_be32 = hash_temp32[count - 1 - i];
510
511 hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
512 + sess->auth.ctx_len);
513 if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
514 return -1;
515 for (i = 0; i < count; i++, hash_value_be32++)
516 *hash_value_be32 = hash_temp32[count - 1 - i];
517 return 0;
518 case CCP_AUTH_ALGO_SHA3_256_HMAC:
519 hash_value_sha3 = sess->auth.pre_compute;
520 if (partial_hash_sha3_256(ipad, hash_value_sha3))
521 return -1;
522
523 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
524 + sess->auth.ctx_len);
525 if (partial_hash_sha3_256(opad, hash_value_sha3))
526 return -1;
527 return 0;
528 case CCP_AUTH_ALGO_SHA384_HMAC:
529 count = SHA512_DIGEST_SIZE >> 3;
530
531 if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
532 return -1;
533 for (i = 0; i < count; i++, hash_value_be64++)
534 *hash_value_be64 = hash_temp64[count - 1 - i];
535
536 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
537 + sess->auth.ctx_len);
538 if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
539 return -1;
540 for (i = 0; i < count; i++, hash_value_be64++)
541 *hash_value_be64 = hash_temp64[count - 1 - i];
542 return 0;
543 case CCP_AUTH_ALGO_SHA3_384_HMAC:
544 hash_value_sha3 = sess->auth.pre_compute;
545 if (partial_hash_sha3_384(ipad, hash_value_sha3))
546 return -1;
547
548 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
549 + sess->auth.ctx_len);
550 if (partial_hash_sha3_384(opad, hash_value_sha3))
551 return -1;
552 return 0;
553 case CCP_AUTH_ALGO_SHA512_HMAC:
554 count = SHA512_DIGEST_SIZE >> 3;
555
556 if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
557 return -1;
558 for (i = 0; i < count; i++, hash_value_be64++)
559 *hash_value_be64 = hash_temp64[count - 1 - i];
560
561 hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
562 + sess->auth.ctx_len);
563 if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
564 return -1;
565 for (i = 0; i < count; i++, hash_value_be64++)
566 *hash_value_be64 = hash_temp64[count - 1 - i];
567 return 0;
568 case CCP_AUTH_ALGO_SHA3_512_HMAC:
569 hash_value_sha3 = sess->auth.pre_compute;
570 if (partial_hash_sha3_512(ipad, hash_value_sha3))
571 return -1;
572
573 hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
574 + sess->auth.ctx_len);
575 if (partial_hash_sha3_512(opad, hash_value_sha3))
576 return -1;
577 return 0;
578 default:
579 CCP_LOG_ERR("Invalid auth algo");
580 return -1;
581 }
582 }
583
/* prepare temporary keys K1 and K2 */
/*
 * CMAC subkey doubling (NIST SP 800-38B dbl()): left-shift the whole
 * bl-byte block 'l' by one bit into 'k'; if the input MSB was set, XOR
 * the Rb constant into the last byte (0x87 for 16-byte AES blocks,
 * 0x1b otherwise).
 */
static void prepare_key(unsigned char *k, unsigned char *l, int bl)
{
	int idx;

	/* Shift block to left, including carry */
	for (idx = 0; idx < bl; idx++) {
		unsigned char carry = 0;

		if (idx + 1 < bl && (l[idx + 1] & 0x80) != 0)
			carry = 1;
		k[idx] = (unsigned char)((l[idx] << 1) | carry);
	}
	/* If MSB set fixup with R */
	if ((l[0] & 0x80) != 0)
		k[bl - 1] ^= (bl == 16) ? 0x87 : 0x1b;
}
598
599 /* subkeys K1 and K2 generation for CMAC */
600 static int
generate_cmac_subkeys(struct ccp_session * sess)601 generate_cmac_subkeys(struct ccp_session *sess)
602 {
603 const EVP_CIPHER *algo;
604 EVP_CIPHER_CTX *ctx;
605 unsigned char *ccp_ctx;
606 size_t i;
607 int dstlen, totlen;
608 unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
609 unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
610 unsigned char k1[AES_BLOCK_SIZE] = {0};
611 unsigned char k2[AES_BLOCK_SIZE] = {0};
612
613 if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
614 algo = EVP_aes_128_cbc();
615 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
616 algo = EVP_aes_192_cbc();
617 else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
618 algo = EVP_aes_256_cbc();
619 else {
620 CCP_LOG_ERR("Invalid CMAC type length");
621 return -1;
622 }
623
624 ctx = EVP_CIPHER_CTX_new();
625 if (!ctx) {
626 CCP_LOG_ERR("ctx creation failed");
627 return -1;
628 }
629 if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
630 (unsigned char *)zero_iv) <= 0)
631 goto key_generate_err;
632 if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
633 goto key_generate_err;
634 if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
635 AES_BLOCK_SIZE) <= 0)
636 goto key_generate_err;
637 if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
638 goto key_generate_err;
639
640 memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
641
642 ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
643 prepare_key(k1, dst, AES_BLOCK_SIZE);
644 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
645 *ccp_ctx = k1[i];
646
647 ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
648 (2 * CCP_SB_BYTES) - 1);
649 prepare_key(k2, k1, AES_BLOCK_SIZE);
650 for (i = 0; i < AES_BLOCK_SIZE; i++, ccp_ctx--)
651 *ccp_ctx = k2[i];
652
653 EVP_CIPHER_CTX_free(ctx);
654
655 return 0;
656
657 key_generate_err:
658 CCP_LOG_ERR("CMAC Init failed");
659 return -1;
660 }
661
/* configure session */
/*
 * Parse the cipher xform into the CCP session: direction, key, IV
 * parameters and engine/mode selection. The raw key is additionally
 * stored byte-reversed into key_ccp (per 8-byte word for 3DES), as the
 * CCP hardware presumably consumes keys LSB-first -- consistent with the
 * reversed SHA init tables above. Returns 0 on success, -1/-ENOTSUP on
 * unsupported or invalid parameters.
 */
static int
ccp_configure_session_cipher(struct ccp_session *sess,
			     const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_cipher_xform *cipher_xform = NULL;
	size_t i, j, x;

	cipher_xform = &xform->cipher;

	/* set cipher direction */
	if (cipher_xform->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
	else
		sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;

	/* set cipher key */
	sess->cipher.key_length = cipher_xform->key.length;
	rte_memcpy(sess->cipher.key, cipher_xform->key.data,
		   cipher_xform->key.length);

	/* set iv parameters */
	sess->iv.offset = cipher_xform->iv.offset;
	sess->iv.length = cipher_xform->iv.length;

	switch (cipher_xform->algo) {
	case RTE_CRYPTO_CIPHER_AES_CTR:
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
		sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_AES_ECB:
		/* NOTE(review): algo is CCP_CIPHER_ALGO_AES_CBC while the
		 * engine mode is ECB -- looks intentional (mode drives the
		 * hardware), but confirm there is no distinct ECB algo enum.
		 */
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
		sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
		sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_AES;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
		sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
		sess->cipher.engine = CCP_ENGINE_3DES;
		break;
	default:
		CCP_LOG_ERR("Unsupported cipher algo");
		return -1;
	}


	switch (sess->cipher.engine) {
	case CCP_ENGINE_AES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
		else if (sess->cipher.key_length == 32)
			sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* whole key stored byte-reversed */
		for (i = 0; i < sess->cipher.key_length ; i++)
			sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
				sess->cipher.key[i];
		break;
	case CCP_ENGINE_3DES:
		if (sess->cipher.key_length == 16)
			sess->cipher.ut.des_type = CCP_DES_TYPE_128;
		else if (sess->cipher.key_length == 24)
			sess->cipher.ut.des_type = CCP_DES_TYPE_192;
		else {
			CCP_LOG_ERR("Invalid cipher key length");
			return -1;
		}
		/* each 8-byte DES subkey is byte-reversed independently */
		for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
			for (i = 0; i < 8; i++)
				sess->cipher.key_ccp[(8 + x) - i - 1] =
					sess->cipher.key[i + x];
		break;
	default:
		CCP_LOG_ERR("Invalid CCP Engine");
		return -ENOTSUP;
	}
	/* physical addresses handed to the engine's descriptors */
	sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
	sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
	return 0;
}
752
753 static int
ccp_configure_session_auth(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform)754 ccp_configure_session_auth(struct ccp_session *sess,
755 const struct rte_crypto_sym_xform *xform)
756 {
757 const struct rte_crypto_auth_xform *auth_xform = NULL;
758 size_t i;
759
760 auth_xform = &xform->auth;
761
762 sess->auth.digest_length = auth_xform->digest_length;
763 if (auth_xform->op == RTE_CRYPTO_AUTH_OP_GENERATE)
764 sess->auth.op = CCP_AUTH_OP_GENERATE;
765 else
766 sess->auth.op = CCP_AUTH_OP_VERIFY;
767 switch (auth_xform->algo) {
768 case RTE_CRYPTO_AUTH_MD5_HMAC:
769 if (sess->auth_opt) {
770 sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
771 sess->auth.offset = ((CCP_SB_BYTES << 1) -
772 MD5_DIGEST_SIZE);
773 sess->auth.key_length = auth_xform->key.length;
774 sess->auth.block_size = MD5_BLOCK_SIZE;
775 memset(sess->auth.key, 0, sess->auth.block_size);
776 rte_memcpy(sess->auth.key, auth_xform->key.data,
777 auth_xform->key.length);
778 } else
779 return -1; /* HMAC MD5 not supported on CCP */
780 break;
781 case RTE_CRYPTO_AUTH_SHA1:
782 sess->auth.engine = CCP_ENGINE_SHA;
783 sess->auth.algo = CCP_AUTH_ALGO_SHA1;
784 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
785 sess->auth.ctx = (void *)ccp_sha1_init;
786 sess->auth.ctx_len = CCP_SB_BYTES;
787 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
788 break;
789 case RTE_CRYPTO_AUTH_SHA1_HMAC:
790 if (sess->auth_opt) {
791 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
792 return -1;
793 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
794 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
795 sess->auth.block_size = SHA1_BLOCK_SIZE;
796 sess->auth.key_length = auth_xform->key.length;
797 memset(sess->auth.key, 0, sess->auth.block_size);
798 rte_memcpy(sess->auth.key, auth_xform->key.data,
799 auth_xform->key.length);
800 } else {
801 if (auth_xform->key.length > SHA1_BLOCK_SIZE)
802 return -1;
803 sess->auth.engine = CCP_ENGINE_SHA;
804 sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
805 sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
806 sess->auth.ctx_len = CCP_SB_BYTES;
807 sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
808 sess->auth.block_size = SHA1_BLOCK_SIZE;
809 sess->auth.key_length = auth_xform->key.length;
810 memset(sess->auth.key, 0, sess->auth.block_size);
811 memset(sess->auth.pre_compute, 0,
812 sess->auth.ctx_len << 1);
813 rte_memcpy(sess->auth.key, auth_xform->key.data,
814 auth_xform->key.length);
815 if (generate_partial_hash(sess))
816 return -1;
817 }
818 break;
819 case RTE_CRYPTO_AUTH_SHA224:
820 sess->auth.algo = CCP_AUTH_ALGO_SHA224;
821 sess->auth.engine = CCP_ENGINE_SHA;
822 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
823 sess->auth.ctx = (void *)ccp_sha224_init;
824 sess->auth.ctx_len = CCP_SB_BYTES;
825 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
826 break;
827 case RTE_CRYPTO_AUTH_SHA224_HMAC:
828 if (sess->auth_opt) {
829 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
830 return -1;
831 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
832 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
833 sess->auth.block_size = SHA224_BLOCK_SIZE;
834 sess->auth.key_length = auth_xform->key.length;
835 memset(sess->auth.key, 0, sess->auth.block_size);
836 rte_memcpy(sess->auth.key, auth_xform->key.data,
837 auth_xform->key.length);
838 } else {
839 if (auth_xform->key.length > SHA224_BLOCK_SIZE)
840 return -1;
841 sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
842 sess->auth.engine = CCP_ENGINE_SHA;
843 sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
844 sess->auth.ctx_len = CCP_SB_BYTES;
845 sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
846 sess->auth.block_size = SHA224_BLOCK_SIZE;
847 sess->auth.key_length = auth_xform->key.length;
848 memset(sess->auth.key, 0, sess->auth.block_size);
849 memset(sess->auth.pre_compute, 0,
850 sess->auth.ctx_len << 1);
851 rte_memcpy(sess->auth.key, auth_xform->key.data,
852 auth_xform->key.length);
853 if (generate_partial_hash(sess))
854 return -1;
855 }
856 break;
857 case RTE_CRYPTO_AUTH_SHA3_224:
858 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
859 sess->auth.engine = CCP_ENGINE_SHA;
860 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
861 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
862 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
863 break;
864 case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
865 if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
866 return -1;
867 sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
868 sess->auth.engine = CCP_ENGINE_SHA;
869 sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
870 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
871 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
872 sess->auth.block_size = SHA3_224_BLOCK_SIZE;
873 sess->auth.key_length = auth_xform->key.length;
874 memset(sess->auth.key, 0, sess->auth.block_size);
875 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
876 rte_memcpy(sess->auth.key, auth_xform->key.data,
877 auth_xform->key.length);
878 if (generate_partial_hash(sess))
879 return -1;
880 break;
881 case RTE_CRYPTO_AUTH_SHA256:
882 sess->auth.algo = CCP_AUTH_ALGO_SHA256;
883 sess->auth.engine = CCP_ENGINE_SHA;
884 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
885 sess->auth.ctx = (void *)ccp_sha256_init;
886 sess->auth.ctx_len = CCP_SB_BYTES;
887 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
888 break;
889 case RTE_CRYPTO_AUTH_SHA256_HMAC:
890 if (sess->auth_opt) {
891 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
892 return -1;
893 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
894 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
895 sess->auth.block_size = SHA256_BLOCK_SIZE;
896 sess->auth.key_length = auth_xform->key.length;
897 memset(sess->auth.key, 0, sess->auth.block_size);
898 rte_memcpy(sess->auth.key, auth_xform->key.data,
899 auth_xform->key.length);
900 } else {
901 if (auth_xform->key.length > SHA256_BLOCK_SIZE)
902 return -1;
903 sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
904 sess->auth.engine = CCP_ENGINE_SHA;
905 sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
906 sess->auth.ctx_len = CCP_SB_BYTES;
907 sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
908 sess->auth.block_size = SHA256_BLOCK_SIZE;
909 sess->auth.key_length = auth_xform->key.length;
910 memset(sess->auth.key, 0, sess->auth.block_size);
911 memset(sess->auth.pre_compute, 0,
912 sess->auth.ctx_len << 1);
913 rte_memcpy(sess->auth.key, auth_xform->key.data,
914 auth_xform->key.length);
915 if (generate_partial_hash(sess))
916 return -1;
917 }
918 break;
919 case RTE_CRYPTO_AUTH_SHA3_256:
920 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
921 sess->auth.engine = CCP_ENGINE_SHA;
922 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
923 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
924 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
925 break;
926 case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
927 if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
928 return -1;
929 sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
930 sess->auth.engine = CCP_ENGINE_SHA;
931 sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
932 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
933 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
934 sess->auth.block_size = SHA3_256_BLOCK_SIZE;
935 sess->auth.key_length = auth_xform->key.length;
936 memset(sess->auth.key, 0, sess->auth.block_size);
937 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
938 rte_memcpy(sess->auth.key, auth_xform->key.data,
939 auth_xform->key.length);
940 if (generate_partial_hash(sess))
941 return -1;
942 break;
943 case RTE_CRYPTO_AUTH_SHA384:
944 sess->auth.algo = CCP_AUTH_ALGO_SHA384;
945 sess->auth.engine = CCP_ENGINE_SHA;
946 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
947 sess->auth.ctx = (void *)ccp_sha384_init;
948 sess->auth.ctx_len = CCP_SB_BYTES << 1;
949 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
950 break;
951 case RTE_CRYPTO_AUTH_SHA384_HMAC:
952 if (sess->auth_opt) {
953 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
954 return -1;
955 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
956 sess->auth.offset = ((CCP_SB_BYTES << 1) -
957 SHA384_DIGEST_SIZE);
958 sess->auth.block_size = SHA384_BLOCK_SIZE;
959 sess->auth.key_length = auth_xform->key.length;
960 memset(sess->auth.key, 0, sess->auth.block_size);
961 rte_memcpy(sess->auth.key, auth_xform->key.data,
962 auth_xform->key.length);
963 } else {
964 if (auth_xform->key.length > SHA384_BLOCK_SIZE)
965 return -1;
966 sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
967 sess->auth.engine = CCP_ENGINE_SHA;
968 sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
969 sess->auth.ctx_len = CCP_SB_BYTES << 1;
970 sess->auth.offset = ((CCP_SB_BYTES << 1) -
971 SHA384_DIGEST_SIZE);
972 sess->auth.block_size = SHA384_BLOCK_SIZE;
973 sess->auth.key_length = auth_xform->key.length;
974 memset(sess->auth.key, 0, sess->auth.block_size);
975 memset(sess->auth.pre_compute, 0,
976 sess->auth.ctx_len << 1);
977 rte_memcpy(sess->auth.key, auth_xform->key.data,
978 auth_xform->key.length);
979 if (generate_partial_hash(sess))
980 return -1;
981 }
982 break;
983 case RTE_CRYPTO_AUTH_SHA3_384:
984 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
985 sess->auth.engine = CCP_ENGINE_SHA;
986 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
987 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
988 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
989 break;
990 case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
991 if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
992 return -1;
993 sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
994 sess->auth.engine = CCP_ENGINE_SHA;
995 sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
996 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
997 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
998 sess->auth.block_size = SHA3_384_BLOCK_SIZE;
999 sess->auth.key_length = auth_xform->key.length;
1000 memset(sess->auth.key, 0, sess->auth.block_size);
1001 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1002 rte_memcpy(sess->auth.key, auth_xform->key.data,
1003 auth_xform->key.length);
1004 if (generate_partial_hash(sess))
1005 return -1;
1006 break;
1007 case RTE_CRYPTO_AUTH_SHA512:
1008 sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1009 sess->auth.engine = CCP_ENGINE_SHA;
1010 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1011 sess->auth.ctx = (void *)ccp_sha512_init;
1012 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1013 sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1014 break;
1015 case RTE_CRYPTO_AUTH_SHA512_HMAC:
1016 if (sess->auth_opt) {
1017 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1018 return -1;
1019 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1020 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1021 SHA512_DIGEST_SIZE);
1022 sess->auth.block_size = SHA512_BLOCK_SIZE;
1023 sess->auth.key_length = auth_xform->key.length;
1024 memset(sess->auth.key, 0, sess->auth.block_size);
1025 rte_memcpy(sess->auth.key, auth_xform->key.data,
1026 auth_xform->key.length);
1027 } else {
1028 if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1029 return -1;
1030 sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1031 sess->auth.engine = CCP_ENGINE_SHA;
1032 sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1033 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1034 sess->auth.offset = ((CCP_SB_BYTES << 1) -
1035 SHA512_DIGEST_SIZE);
1036 sess->auth.block_size = SHA512_BLOCK_SIZE;
1037 sess->auth.key_length = auth_xform->key.length;
1038 memset(sess->auth.key, 0, sess->auth.block_size);
1039 memset(sess->auth.pre_compute, 0,
1040 sess->auth.ctx_len << 1);
1041 rte_memcpy(sess->auth.key, auth_xform->key.data,
1042 auth_xform->key.length);
1043 if (generate_partial_hash(sess))
1044 return -1;
1045 }
1046 break;
1047 case RTE_CRYPTO_AUTH_SHA3_512:
1048 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1049 sess->auth.engine = CCP_ENGINE_SHA;
1050 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1051 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1052 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1053 break;
1054 case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1055 if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1056 return -1;
1057 sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1058 sess->auth.engine = CCP_ENGINE_SHA;
1059 sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1060 sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1061 sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1062 sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1063 sess->auth.key_length = auth_xform->key.length;
1064 memset(sess->auth.key, 0, sess->auth.block_size);
1065 memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1066 rte_memcpy(sess->auth.key, auth_xform->key.data,
1067 auth_xform->key.length);
1068 if (generate_partial_hash(sess))
1069 return -1;
1070 break;
1071 case RTE_CRYPTO_AUTH_AES_CMAC:
1072 sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1073 sess->auth.engine = CCP_ENGINE_AES;
1074 sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1075 sess->auth.key_length = auth_xform->key.length;
1076 /* padding and hash result */
1077 sess->auth.ctx_len = CCP_SB_BYTES << 1;
1078 sess->auth.offset = AES_BLOCK_SIZE;
1079 sess->auth.block_size = AES_BLOCK_SIZE;
1080 if (sess->auth.key_length == 16)
1081 sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1082 else if (sess->auth.key_length == 24)
1083 sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1084 else if (sess->auth.key_length == 32)
1085 sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1086 else {
1087 CCP_LOG_ERR("Invalid CMAC key length");
1088 return -1;
1089 }
1090 rte_memcpy(sess->auth.key, auth_xform->key.data,
1091 sess->auth.key_length);
1092 for (i = 0; i < sess->auth.key_length; i++)
1093 sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1094 sess->auth.key[i];
1095 if (generate_cmac_subkeys(sess))
1096 return -1;
1097 break;
1098 default:
1099 CCP_LOG_ERR("Unsupported hash algo");
1100 return -ENOTSUP;
1101 }
1102 return 0;
1103 }
1104
1105 static int
ccp_configure_session_aead(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform)1106 ccp_configure_session_aead(struct ccp_session *sess,
1107 const struct rte_crypto_sym_xform *xform)
1108 {
1109 const struct rte_crypto_aead_xform *aead_xform = NULL;
1110 size_t i;
1111
1112 aead_xform = &xform->aead;
1113
1114 sess->cipher.key_length = aead_xform->key.length;
1115 rte_memcpy(sess->cipher.key, aead_xform->key.data,
1116 aead_xform->key.length);
1117
1118 if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1119 sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1120 sess->auth.op = CCP_AUTH_OP_GENERATE;
1121 } else {
1122 sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1123 sess->auth.op = CCP_AUTH_OP_VERIFY;
1124 }
1125 sess->aead_algo = aead_xform->algo;
1126 sess->auth.aad_length = aead_xform->aad_length;
1127 sess->auth.digest_length = aead_xform->digest_length;
1128
1129 /* set iv parameters */
1130 sess->iv.offset = aead_xform->iv.offset;
1131 sess->iv.length = aead_xform->iv.length;
1132
1133 switch (aead_xform->algo) {
1134 case RTE_CRYPTO_AEAD_AES_GCM:
1135 sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1136 sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1137 sess->cipher.engine = CCP_ENGINE_AES;
1138 if (sess->cipher.key_length == 16)
1139 sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1140 else if (sess->cipher.key_length == 24)
1141 sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1142 else if (sess->cipher.key_length == 32)
1143 sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1144 else {
1145 CCP_LOG_ERR("Invalid aead key length");
1146 return -1;
1147 }
1148 for (i = 0; i < sess->cipher.key_length; i++)
1149 sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1150 sess->cipher.key[i];
1151 sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1152 sess->auth.engine = CCP_ENGINE_AES;
1153 sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1154 sess->auth.ctx_len = CCP_SB_BYTES;
1155 sess->auth.offset = 0;
1156 sess->auth.block_size = AES_BLOCK_SIZE;
1157 sess->cmd_id = CCP_CMD_COMBINED;
1158 break;
1159 default:
1160 CCP_LOG_ERR("Unsupported aead algo");
1161 return -ENOTSUP;
1162 }
1163 sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
1164 sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
1165 return 0;
1166 }
1167
1168 int
ccp_set_session_parameters(struct ccp_session * sess,const struct rte_crypto_sym_xform * xform,struct ccp_private * internals)1169 ccp_set_session_parameters(struct ccp_session *sess,
1170 const struct rte_crypto_sym_xform *xform,
1171 struct ccp_private *internals)
1172 {
1173 const struct rte_crypto_sym_xform *cipher_xform = NULL;
1174 const struct rte_crypto_sym_xform *auth_xform = NULL;
1175 const struct rte_crypto_sym_xform *aead_xform = NULL;
1176 int ret = 0;
1177
1178 sess->auth_opt = internals->auth_opt;
1179 sess->cmd_id = ccp_get_cmd_id(xform);
1180
1181 switch (sess->cmd_id) {
1182 case CCP_CMD_CIPHER:
1183 cipher_xform = xform;
1184 break;
1185 case CCP_CMD_AUTH:
1186 auth_xform = xform;
1187 break;
1188 case CCP_CMD_CIPHER_HASH:
1189 cipher_xform = xform;
1190 auth_xform = xform->next;
1191 break;
1192 case CCP_CMD_HASH_CIPHER:
1193 auth_xform = xform;
1194 cipher_xform = xform->next;
1195 break;
1196 case CCP_CMD_COMBINED:
1197 aead_xform = xform;
1198 break;
1199 default:
1200 CCP_LOG_ERR("Unsupported cmd_id");
1201 return -1;
1202 }
1203
1204 /* Default IV length = 0 */
1205 sess->iv.length = 0;
1206 if (cipher_xform) {
1207 ret = ccp_configure_session_cipher(sess, cipher_xform);
1208 if (ret != 0) {
1209 CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1210 return ret;
1211 }
1212 }
1213 if (auth_xform) {
1214 ret = ccp_configure_session_auth(sess, auth_xform);
1215 if (ret != 0) {
1216 CCP_LOG_ERR("Invalid/unsupported auth parameters");
1217 return ret;
1218 }
1219 }
1220 if (aead_xform) {
1221 ret = ccp_configure_session_aead(sess, aead_xform);
1222 if (ret != 0) {
1223 CCP_LOG_ERR("Invalid/unsupported aead parameters");
1224 return ret;
1225 }
1226 }
1227 return ret;
1228 }
1229
1230 /* calculate CCP descriptors requirement */
1231 static inline int
ccp_cipher_slot(struct ccp_session * session)1232 ccp_cipher_slot(struct ccp_session *session)
1233 {
1234 int count = 0;
1235
1236 switch (session->cipher.algo) {
1237 case CCP_CIPHER_ALGO_AES_CBC:
1238 count = 2;
1239 /**< op + passthrough for iv */
1240 break;
1241 case CCP_CIPHER_ALGO_AES_ECB:
1242 count = 1;
1243 /**<only op*/
1244 break;
1245 case CCP_CIPHER_ALGO_AES_CTR:
1246 count = 2;
1247 /**< op + passthrough for iv */
1248 break;
1249 case CCP_CIPHER_ALGO_3DES_CBC:
1250 count = 2;
1251 /**< op + passthrough for iv */
1252 break;
1253 default:
1254 CCP_LOG_ERR("Unsupported cipher algo %d",
1255 session->cipher.algo);
1256 }
1257 return count;
1258 }
1259
1260 static inline int
ccp_auth_slot(struct ccp_session * session)1261 ccp_auth_slot(struct ccp_session *session)
1262 {
1263 int count = 0;
1264
1265 switch (session->auth.algo) {
1266 case CCP_AUTH_ALGO_SHA1:
1267 case CCP_AUTH_ALGO_SHA224:
1268 case CCP_AUTH_ALGO_SHA256:
1269 case CCP_AUTH_ALGO_SHA384:
1270 case CCP_AUTH_ALGO_SHA512:
1271 count = 3;
1272 /**< op + lsb passthrough cpy to/from*/
1273 break;
1274 case CCP_AUTH_ALGO_MD5_HMAC:
1275 break;
1276 case CCP_AUTH_ALGO_SHA1_HMAC:
1277 case CCP_AUTH_ALGO_SHA224_HMAC:
1278 case CCP_AUTH_ALGO_SHA256_HMAC:
1279 if (session->auth_opt == 0)
1280 count = 6;
1281 break;
1282 case CCP_AUTH_ALGO_SHA384_HMAC:
1283 case CCP_AUTH_ALGO_SHA512_HMAC:
1284 /**
1285 * 1. Load PHash1 = H(k ^ ipad); to LSB
1286 * 2. generate IHash = H(hash on message with PHash1
1287 * as init values);
1288 * 3. Retrieve IHash 2 slots for 384/512
1289 * 4. Load Phash2 = H(k ^ opad); to LSB
1290 * 5. generate FHash = H(hash on Ihash with Phash2
1291 * as init value);
1292 * 6. Retrieve HMAC output from LSB to host memory
1293 */
1294 if (session->auth_opt == 0)
1295 count = 7;
1296 break;
1297 case CCP_AUTH_ALGO_SHA3_224:
1298 case CCP_AUTH_ALGO_SHA3_256:
1299 case CCP_AUTH_ALGO_SHA3_384:
1300 case CCP_AUTH_ALGO_SHA3_512:
1301 count = 1;
1302 /**< only op ctx and dst in host memory*/
1303 break;
1304 case CCP_AUTH_ALGO_SHA3_224_HMAC:
1305 case CCP_AUTH_ALGO_SHA3_256_HMAC:
1306 count = 3;
1307 break;
1308 case CCP_AUTH_ALGO_SHA3_384_HMAC:
1309 case CCP_AUTH_ALGO_SHA3_512_HMAC:
1310 count = 4;
1311 /**
1312 * 1. Op to Perform Ihash
1313 * 2. Retrieve result from LSB to host memory
1314 * 3. Perform final hash
1315 */
1316 break;
1317 case CCP_AUTH_ALGO_AES_CMAC:
1318 count = 4;
1319 /**
1320 * op
1321 * extra descriptor in padding case
1322 * (k1/k2(255:128) with iv(127:0))
1323 * Retrieve result
1324 */
1325 break;
1326 default:
1327 CCP_LOG_ERR("Unsupported auth algo %d",
1328 session->auth.algo);
1329 }
1330
1331 return count;
1332 }
1333
1334 static int
ccp_aead_slot(struct ccp_session * session)1335 ccp_aead_slot(struct ccp_session *session)
1336 {
1337 int count = 0;
1338
1339 switch (session->aead_algo) {
1340 case RTE_CRYPTO_AEAD_AES_GCM:
1341 break;
1342 default:
1343 CCP_LOG_ERR("Unsupported aead algo %d",
1344 session->aead_algo);
1345 }
1346 switch (session->auth.algo) {
1347 case CCP_AUTH_ALGO_AES_GCM:
1348 count = 5;
1349 /**
1350 * 1. Passthru iv
1351 * 2. Hash AAD
1352 * 3. GCTR
1353 * 4. Reload passthru
1354 * 5. Hash Final tag
1355 */
1356 break;
1357 default:
1358 CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1359 session->auth.algo);
1360 }
1361 return count;
1362 }
1363
1364 int
ccp_compute_slot_count(struct ccp_session * session)1365 ccp_compute_slot_count(struct ccp_session *session)
1366 {
1367 int count = 0;
1368
1369 switch (session->cmd_id) {
1370 case CCP_CMD_CIPHER:
1371 count = ccp_cipher_slot(session);
1372 break;
1373 case CCP_CMD_AUTH:
1374 count = ccp_auth_slot(session);
1375 break;
1376 case CCP_CMD_CIPHER_HASH:
1377 case CCP_CMD_HASH_CIPHER:
1378 count = ccp_cipher_slot(session);
1379 count += ccp_auth_slot(session);
1380 break;
1381 case CCP_CMD_COMBINED:
1382 count = ccp_aead_slot(session);
1383 break;
1384 default:
1385 CCP_LOG_ERR("Unsupported cmd_id");
1386
1387 }
1388
1389 return count;
1390 }
1391
1392 static uint8_t
algo_select(int sessalgo,const EVP_MD ** algo)1393 algo_select(int sessalgo,
1394 const EVP_MD **algo)
1395 {
1396 int res = 0;
1397
1398 switch (sessalgo) {
1399 case CCP_AUTH_ALGO_MD5_HMAC:
1400 *algo = EVP_md5();
1401 break;
1402 case CCP_AUTH_ALGO_SHA1_HMAC:
1403 *algo = EVP_sha1();
1404 break;
1405 case CCP_AUTH_ALGO_SHA224_HMAC:
1406 *algo = EVP_sha224();
1407 break;
1408 case CCP_AUTH_ALGO_SHA256_HMAC:
1409 *algo = EVP_sha256();
1410 break;
1411 case CCP_AUTH_ALGO_SHA384_HMAC:
1412 *algo = EVP_sha384();
1413 break;
1414 case CCP_AUTH_ALGO_SHA512_HMAC:
1415 *algo = EVP_sha512();
1416 break;
1417 default:
1418 res = -EINVAL;
1419 break;
1420 }
1421 return res;
1422 }
1423
1424 static int
process_cpu_auth_hmac(uint8_t * src,uint8_t * dst,__rte_unused uint8_t * iv,EVP_PKEY * pkey,int srclen,EVP_MD_CTX * ctx,const EVP_MD * algo,uint16_t d_len)1425 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1426 __rte_unused uint8_t *iv,
1427 EVP_PKEY *pkey,
1428 int srclen,
1429 EVP_MD_CTX *ctx,
1430 const EVP_MD *algo,
1431 uint16_t d_len)
1432 {
1433 size_t dstlen;
1434 unsigned char temp_dst[64];
1435
1436 if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1437 goto process_auth_err;
1438
1439 if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1440 goto process_auth_err;
1441
1442 if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1443 goto process_auth_err;
1444
1445 memcpy(dst, temp_dst, d_len);
1446 return 0;
1447 process_auth_err:
1448 CCP_LOG_ERR("Process cpu auth failed");
1449 return -EINVAL;
1450 }
1451
/*
 * CPU (OpenSSL) fallback for HMAC authentication of one crypto op.
 * Generates the digest into the op's digest buffer, or verifies it
 * against qp->temp_digest. Sets op->status and returns 0, or a
 * negative errno on hard failure.
 */
static int cpu_crypto_auth(struct ccp_qp *qp,
			   struct rte_crypto_op *op,
			   struct ccp_session *sess,
			   EVP_MD_CTX *ctx)
{
	uint8_t *src, *dst;
	int srclen, status;
	struct rte_mbuf *mbuf_src, *mbuf_dst;
	const EVP_MD *algo = NULL;
	EVP_PKEY *pkey;

	algo_select(sess->auth.algo, &algo);
	pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
				    sess->auth.key_length);
	if (pkey == NULL) {
		/* allocation/keying failure: nothing to free yet */
		CCP_LOG_ERR("Process cpu auth failed");
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		return -EINVAL;
	}
	mbuf_src = op->sym->m_src;
	mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
	srclen = op->sym->auth.data.length;
	src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
				      op->sym->auth.data.offset);

	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
		/* verify: compute into scratch, compare below */
		dst = qp->temp_digest;
	} else {
		dst = op->sym->auth.digest.data;
		if (dst == NULL) {
			dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
						      op->sym->auth.data.offset +
						      sess->auth.digest_length);
		}
	}
	status = process_cpu_auth_hmac(src, dst, NULL,
				       pkey, srclen,
				       ctx,
				       algo,
				       sess->auth.digest_length);
	if (status) {
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		/* fix: pkey was leaked on this error path */
		EVP_PKEY_free(pkey);
		return status;
	}

	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(dst, op->sym->auth.digest.data,
			   sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		} else {
			op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
		}
	} else {
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	}
	EVP_PKEY_free(pkey);
	return 0;
}
1505
/*
 * Fill the next ring descriptor with a PASSTHRU (memory copy) command.
 * pst->dir selects the direction: non-zero copies from system memory
 * into a storage-block (LSB) slot, zero copies from an LSB slot back to
 * system memory. Advances cmd_q->qidx; the queue doorbell is written by
 * the caller.
 */
static void
ccp_perform_passthru(struct ccp_passthru *pst,
		     struct ccp_queue *cmd_q)
{
	struct ccp_desc *desc;
	union ccp_function function;

	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;

	/* no stop-on-complete, interrupt, init or end-of-message flags */
	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 0;
	CCP_CMD_EOM(desc) = 0;
	CCP_CMD_PROT(desc) = 0;

	/* function word encodes the requested byte-swap and bit mask */
	function.raw = 0;
	CCP_PT_BYTESWAP(&function) = pst->byte_swap;
	CCP_PT_BITWISE(&function) = pst->bit_mod;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = pst->len;

	if (pst->dir) {
		/* host memory -> LSB slot */
		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
		CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
		CCP_CMD_DST_HI(desc) = 0;
		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

		/* bitwise ops need the key LSB slot id */
		if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
			CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
	} else {
		/* LSB slot -> host memory */

		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
		CCP_CMD_SRC_HI(desc) = 0;
		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;

		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
		CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
	}

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
}
1554
1555 static int
ccp_perform_hmac(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1556 ccp_perform_hmac(struct rte_crypto_op *op,
1557 struct ccp_queue *cmd_q)
1558 {
1559
1560 struct ccp_session *session;
1561 union ccp_function function;
1562 struct ccp_desc *desc;
1563 uint32_t tail;
1564 phys_addr_t src_addr, dest_addr, dest_addr_t;
1565 struct ccp_passthru pst;
1566 uint64_t auth_msg_bits;
1567 void *append_ptr;
1568 uint8_t *addr;
1569
1570 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
1571 addr = session->auth.pre_compute;
1572
1573 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1574 op->sym->auth.data.offset);
1575 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1576 session->auth.ctx_len);
1577 dest_addr_t = dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1578 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1579
1580 /** Load PHash1 to LSB*/
1581 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1582 pst.len = session->auth.ctx_len;
1583 pst.dir = 1;
1584 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1585 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1586 ccp_perform_passthru(&pst, cmd_q);
1587
1588 /**sha engine command descriptor for IntermediateHash*/
1589
1590 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1591 memset(desc, 0, Q_DESC_SIZE);
1592
1593 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1594
1595 CCP_CMD_SOC(desc) = 0;
1596 CCP_CMD_IOC(desc) = 0;
1597 CCP_CMD_INIT(desc) = 1;
1598 CCP_CMD_EOM(desc) = 1;
1599 CCP_CMD_PROT(desc) = 0;
1600
1601 function.raw = 0;
1602 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1603 CCP_CMD_FUNCTION(desc) = function.raw;
1604
1605 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1606 auth_msg_bits = (op->sym->auth.data.length +
1607 session->auth.block_size) * 8;
1608
1609 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1610 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1611 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1612
1613 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1614 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1615 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1616
1617 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1618
1619 rte_wmb();
1620
1621 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1622 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1623 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1624 cmd_q->qcontrol | CMD_Q_RUN);
1625
1626 /* Intermediate Hash value retrieve */
1627 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1628 (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
1629
1630 pst.src_addr =
1631 (phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1632 pst.dest_addr = dest_addr_t;
1633 pst.len = CCP_SB_BYTES;
1634 pst.dir = 0;
1635 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1636 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1637 ccp_perform_passthru(&pst, cmd_q);
1638
1639 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1640 pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1641 pst.len = CCP_SB_BYTES;
1642 pst.dir = 0;
1643 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1644 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1645 ccp_perform_passthru(&pst, cmd_q);
1646
1647 } else {
1648 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1649 pst.dest_addr = dest_addr_t;
1650 pst.len = session->auth.ctx_len;
1651 pst.dir = 0;
1652 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1653 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1654 ccp_perform_passthru(&pst, cmd_q);
1655
1656 }
1657
1658 /** Load PHash2 to LSB*/
1659 addr += session->auth.ctx_len;
1660 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1661 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1662 pst.len = session->auth.ctx_len;
1663 pst.dir = 1;
1664 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1665 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1666 ccp_perform_passthru(&pst, cmd_q);
1667
1668 /**sha engine command descriptor for FinalHash*/
1669 dest_addr_t += session->auth.offset;
1670
1671 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1672 memset(desc, 0, Q_DESC_SIZE);
1673
1674 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1675
1676 CCP_CMD_SOC(desc) = 0;
1677 CCP_CMD_IOC(desc) = 0;
1678 CCP_CMD_INIT(desc) = 1;
1679 CCP_CMD_EOM(desc) = 1;
1680 CCP_CMD_PROT(desc) = 0;
1681
1682 function.raw = 0;
1683 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1684 CCP_CMD_FUNCTION(desc) = function.raw;
1685
1686 CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1687 session->auth.offset);
1688 auth_msg_bits = (session->auth.block_size +
1689 session->auth.ctx_len -
1690 session->auth.offset) * 8;
1691
1692 CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1693 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1694 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1695
1696 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1697 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1698 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1699
1700 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1701
1702 rte_wmb();
1703
1704 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1705 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1706 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1707 cmd_q->qcontrol | CMD_Q_RUN);
1708
1709 /* Retrieve hmac output */
1710 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1711 pst.dest_addr = dest_addr;
1712 pst.len = session->auth.ctx_len;
1713 pst.dir = 0;
1714 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1715 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1716 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1717 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1718 else
1719 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1720 ccp_perform_passthru(&pst, cmd_q);
1721
1722 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1723 return 0;
1724
1725 }
1726
1727 static int
ccp_perform_sha(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1728 ccp_perform_sha(struct rte_crypto_op *op,
1729 struct ccp_queue *cmd_q)
1730 {
1731 struct ccp_session *session;
1732 union ccp_function function;
1733 struct ccp_desc *desc;
1734 uint32_t tail;
1735 phys_addr_t src_addr, dest_addr;
1736 struct ccp_passthru pst;
1737 void *append_ptr;
1738 uint64_t auth_msg_bits;
1739
1740 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
1741
1742 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1743 op->sym->auth.data.offset);
1744 append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1745 session->auth.ctx_len);
1746 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)session->auth.ctx);
1747 dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1748
1749 /** Passthru sha context*/
1750
1751 pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1752 pst.len = session->auth.ctx_len;
1753 pst.dir = 1;
1754 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1755 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1756 ccp_perform_passthru(&pst, cmd_q);
1757
1758 /**prepare sha command descriptor*/
1759
1760 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1761 memset(desc, 0, Q_DESC_SIZE);
1762
1763 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1764
1765 CCP_CMD_SOC(desc) = 0;
1766 CCP_CMD_IOC(desc) = 0;
1767 CCP_CMD_INIT(desc) = 1;
1768 CCP_CMD_EOM(desc) = 1;
1769 CCP_CMD_PROT(desc) = 0;
1770
1771 function.raw = 0;
1772 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1773 CCP_CMD_FUNCTION(desc) = function.raw;
1774
1775 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1776 auth_msg_bits = op->sym->auth.data.length * 8;
1777
1778 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1779 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1780 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1781
1782 CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1783 CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1784 CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1785
1786 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1787
1788 rte_wmb();
1789
1790 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1791 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1792 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1793 cmd_q->qcontrol | CMD_Q_RUN);
1794
1795 /* Hash value retrieve */
1796 pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1797 pst.dest_addr = dest_addr;
1798 pst.len = session->auth.ctx_len;
1799 pst.dir = 0;
1800 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1801 if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1802 (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1803 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1804 else
1805 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1806 ccp_perform_passthru(&pst, cmd_q);
1807
1808 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1809 return 0;
1810
1811 }
1812
/*
 * Build CCP descriptors for SHA3-based HMAC: hash the message with the
 * inner precomputed context, pull the intermediate digest out of the
 * LSB into the mbuf tail, then hash it again with the outer context.
 * Returns 0 on success, -1 when the mbuf has no tailroom.
 */
static int
ccp_perform_sha3_hmac(struct rte_crypto_op *op,
		      struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	struct ccp_passthru pst;
	union ccp_function function;
	struct ccp_desc *desc;
	uint8_t *append_ptr;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;

	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->auth.data.offset);
	/* reserve tailroom for the SHA3 context/result area */
	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
						   session->auth.ctx_len);
	if (!append_ptr) {
		CCP_LOG_ERR("CCP MBUF append failed");
		return -1;
	}
	dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
	/* pre_compute holds inner ctx followed by outer ctx */
	ctx_paddr = (phys_addr_t)rte_mem_virt2iova(session->auth.pre_compute);
	/* scratch area in the second half of the appended region */
	dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	/*desc1 for SHA3-Ihash operation */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;
	CCP_CMD_LEN(desc) = op->sym->auth.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* intermediate digest lands in the LSB slot */
	CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
	CCP_CMD_DST_HI(desc) = 0;
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

	/* KEY field carries the inner precomputed context */
	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	/* ring the queue doorbell */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Intermediate Hash value retrieve */
	if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
		/* 384/512 intermediate digests span two LSB slots */
		pst.src_addr =
			(phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

	} else {
		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	/**sha engine command descriptor for FinalHash*/
	/* advance to the outer precomputed context */
	ctx_paddr += CCP_SHA3_CTX_SIZE;
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	/* digest size and in-slot alignment depend on the SHA3 variant */
	if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
		dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
		CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
		dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
	} else {
		CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
	}

	CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
	CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* final HMAC goes to the start of the appended region */
	CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
1950
1951 static int
ccp_perform_sha3(struct rte_crypto_op * op,struct ccp_queue * cmd_q)1952 ccp_perform_sha3(struct rte_crypto_op *op,
1953 struct ccp_queue *cmd_q)
1954 {
1955 struct ccp_session *session;
1956 union ccp_function function;
1957 struct ccp_desc *desc;
1958 uint8_t *ctx_addr = NULL, *append_ptr = NULL;
1959 uint32_t tail;
1960 phys_addr_t src_addr, dest_addr, ctx_paddr;
1961
1962 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
1963
1964 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1965 op->sym->auth.data.offset);
1966 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1967 session->auth.ctx_len);
1968 if (!append_ptr) {
1969 CCP_LOG_ERR("CCP MBUF append failed");
1970 return -1;
1971 }
1972 dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
1973 ctx_paddr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
1974
1975 ctx_addr = session->auth.sha3_ctx;
1976
1977 desc = &cmd_q->qbase_desc[cmd_q->qidx];
1978 memset(desc, 0, Q_DESC_SIZE);
1979
1980 /* prepare desc for SHA3 operation */
1981 CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1982 CCP_CMD_INIT(desc) = 1;
1983 CCP_CMD_EOM(desc) = 1;
1984
1985 function.raw = 0;
1986 CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1987 CCP_CMD_FUNCTION(desc) = function.raw;
1988
1989 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1990
1991 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1992 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1993 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1994
1995 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1996 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1997 CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1998
1999 CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
2000 CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
2001 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2002
2003 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2004
2005 rte_wmb();
2006
2007 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2008 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2009 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2010 cmd_q->qcontrol | CMD_Q_RUN);
2011
2012 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2013 return 0;
2014 }
2015
2016 static int
ccp_perform_aes_cmac(struct rte_crypto_op * op,struct ccp_queue * cmd_q)2017 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2018 struct ccp_queue *cmd_q)
2019 {
2020 struct ccp_session *session;
2021 union ccp_function function;
2022 struct ccp_passthru pst;
2023 struct ccp_desc *desc;
2024 uint32_t tail;
2025 uint8_t *src_tb, *append_ptr, *ctx_addr;
2026 phys_addr_t src_addr, dest_addr, key_addr;
2027 int length, non_align_len;
2028
2029 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2030 key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2031
2032 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2033 op->sym->auth.data.offset);
2034 append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2035 session->auth.ctx_len);
2036 dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2037
2038 function.raw = 0;
2039 CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2040 CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2041 CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
2042
2043 if (op->sym->auth.data.length % session->auth.block_size == 0) {
2044
2045 ctx_addr = session->auth.pre_compute;
2046 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2047 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
2048 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2049 pst.len = CCP_SB_BYTES;
2050 pst.dir = 1;
2051 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2052 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2053 ccp_perform_passthru(&pst, cmd_q);
2054
2055 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2056 memset(desc, 0, Q_DESC_SIZE);
2057
2058 /* prepare desc for aes-cmac command */
2059 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2060 CCP_CMD_EOM(desc) = 1;
2061 CCP_CMD_FUNCTION(desc) = function.raw;
2062
2063 CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2064 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2065 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2066 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2067
2068 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2069 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2070 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2071 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2072
2073 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2074
2075 rte_wmb();
2076
2077 tail =
2078 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2079 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2080 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2081 cmd_q->qcontrol | CMD_Q_RUN);
2082 } else {
2083 ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2084 memset(ctx_addr, 0, AES_BLOCK_SIZE);
2085 pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
2086 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2087 pst.len = CCP_SB_BYTES;
2088 pst.dir = 1;
2089 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2090 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2091 ccp_perform_passthru(&pst, cmd_q);
2092
2093 length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2094 length *= AES_BLOCK_SIZE;
2095 non_align_len = op->sym->auth.data.length - length;
2096 /* prepare desc for aes-cmac command */
2097 /*Command 1*/
2098 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2099 memset(desc, 0, Q_DESC_SIZE);
2100
2101 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2102 CCP_CMD_INIT(desc) = 1;
2103 CCP_CMD_FUNCTION(desc) = function.raw;
2104
2105 CCP_CMD_LEN(desc) = length;
2106 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2107 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2108 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2109
2110 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2111 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2112 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2113 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2114
2115 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2116
2117 /*Command 2*/
2118 append_ptr = append_ptr + CCP_SB_BYTES;
2119 memset(append_ptr, 0, AES_BLOCK_SIZE);
2120 src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2121 uint8_t *,
2122 op->sym->auth.data.offset +
2123 length);
2124 rte_memcpy(append_ptr, src_tb, non_align_len);
2125 append_ptr[non_align_len] = CMAC_PAD_VALUE;
2126
2127 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2128 memset(desc, 0, Q_DESC_SIZE);
2129
2130 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2131 CCP_CMD_EOM(desc) = 1;
2132 CCP_CMD_FUNCTION(desc) = function.raw;
2133 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2134
2135 CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2136 CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2137 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2138
2139 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2140 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2141 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2142 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2143
2144 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2145
2146 rte_wmb();
2147 tail =
2148 (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2149 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2150 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2151 cmd_q->qcontrol | CMD_Q_RUN);
2152 }
2153 /* Retrieve result */
2154 pst.dest_addr = dest_addr;
2155 pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2156 pst.len = CCP_SB_BYTES;
2157 pst.dir = 0;
2158 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2159 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2160 ccp_perform_passthru(&pst, cmd_q);
2161
2162 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2163 return 0;
2164 }
2165
/*
 * Program one AES cipher descriptor for @op on @cmd_q.
 *
 * For non-ECB modes the IV is first staged into the queue's LSB IV slot
 * via a passthrough command: CTR mode reuses the session's nonce buffer,
 * while CBC (and other IV modes) borrows a per-batch slot in
 * b_info->lsb_buf. The descriptor itself is only written into the queue
 * ring here; the tail doorbell is rung later by the batch enqueue path.
 *
 * Returns 0 and marks the op RTE_CRYPTO_OP_STATUS_NOT_PROCESSED.
 */
static int
ccp_perform_aes(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	uint8_t *lsb_buf;
	struct ccp_passthru pst = {0};
	struct ccp_desc *desc;
	phys_addr_t src_addr, dest_addr, key_addr;
	uint8_t *iv;

	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
	function.raw = 0;

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
		if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
			/* Counter portion of the nonce follows the nonce
			 * prefix; 0x1F selects full-width counter size.
			 */
			rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
				   iv, session->iv.length);
			pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
			CCP_AES_SIZE(&function) = 0x1F;
		} else {
			/* CBC: right-align the IV in a fresh per-batch
			 * LSB staging slot.
			 */
			lsb_buf =
			&(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
			rte_memcpy(lsb_buf +
				   (CCP_SB_BYTES - session->iv.length),
				   iv, session->iv.length);
			pst.src_addr = b_info->lsb_buf_phys +
				(b_info->lsb_buf_idx * CCP_SB_BYTES);
			b_info->lsb_buf_idx++;
		}

		/* Load the IV into the queue's reserved LSB IV slot. */
		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->cipher.data.offset);
	/* In-place operation when no distinct destination mbuf given. */
	if (likely(op->sym->m_dst != NULL))
		dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;
	key_addr = session->cipher.key_phys;

	/* prepare desc for aes command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	CCP_AES_ENCRYPT(&function) = session->cipher.dir;
	CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
	CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* Point the command at the staged IV (not needed for ECB). */
	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	/* No doorbell here: the batch enqueue writes the tail register. */
	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
2250
/*
 * Program one 3DES cipher descriptor for @op on @cmd_q and ring the
 * queue doorbell. Only CBC mode is supported: the IV is staged through
 * a per-batch LSB buffer into the queue's IV slot first; CFB/ECB are
 * rejected with -ENOTSUP.
 *
 * Returns 0 on success (op marked NOT_PROCESSED), -ENOTSUP on an
 * unsupported DES mode.
 */
static int
ccp_perform_3des(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	unsigned char *lsb_buf;
	struct ccp_passthru pst;
	struct ccp_desc *desc;
	uint32_t tail;
	uint8_t *iv;
	phys_addr_t src_addr, dest_addr, key_addr;

	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	switch (session->cipher.um.des_mode) {
	case CCP_DES_MODE_CBC:
		/* Right-align the IV in a fresh batch LSB slot and copy it
		 * into the queue's reserved IV storage block.
		 */
		lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
		b_info->lsb_buf_idx++;

		rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
			   iv, session->iv.length);
		pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *) lsb_buf);
		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
		break;
	case CCP_DES_MODE_CFB:
	case CCP_DES_MODE_ECB:
		CCP_LOG_ERR("Unsupported DES cipher mode");
		return -ENOTSUP;
	}

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					   op->sym->cipher.data.offset);
	/* In-place operation unless a distinct destination mbuf is set. */
	if (unlikely(op->sym->m_dst != NULL))
		dest_addr =
			rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;

	key_addr = rte_mem_virt2iova(session->cipher.key_ccp);
	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	memset(desc, 0, Q_DESC_SIZE);

	/* prepare desc for des command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_DES_ENCRYPT(&function) = session->cipher.dir;
	CCP_DES_MODE(&function) = session->cipher.um.des_mode;
	CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* Non-ECB modes (i.e. CBC here) consume the staged IV slot. */
	if (session->cipher.um.des_mode)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();

	/* Write the new tail address back to the queue register */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	/* Turn the queue back on using our cached control register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
2349
2350 static int
ccp_perform_aes_gcm(struct rte_crypto_op * op,struct ccp_queue * cmd_q)2351 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2352 {
2353 struct ccp_session *session;
2354 union ccp_function function;
2355 uint8_t *iv;
2356 struct ccp_passthru pst;
2357 struct ccp_desc *desc;
2358 uint32_t tail;
2359 uint64_t *temp;
2360 phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2361 phys_addr_t digest_dest_addr;
2362 int length, non_align_len;
2363
2364 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2365 iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2366 key_addr = session->cipher.key_phys;
2367
2368 src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2369 op->sym->aead.data.offset);
2370 if (unlikely(op->sym->m_dst != NULL))
2371 dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2372 op->sym->aead.data.offset);
2373 else
2374 dest_addr = src_addr;
2375 rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2376 digest_dest_addr = op->sym->aead.digest.phys_addr;
2377 temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2378 *temp++ = rte_bswap64(session->auth.aad_length << 3);
2379 *temp = rte_bswap64(op->sym->aead.data.length << 3);
2380
2381 non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2382 length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2383
2384 aad_addr = op->sym->aead.aad.phys_addr;
2385
2386 /* CMD1 IV Passthru */
2387 rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2388 session->iv.length);
2389 pst.src_addr = session->cipher.nonce_phys;
2390 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2391 pst.len = CCP_SB_BYTES;
2392 pst.dir = 1;
2393 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2394 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2395 ccp_perform_passthru(&pst, cmd_q);
2396
2397 /* CMD2 GHASH-AAD */
2398 function.raw = 0;
2399 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2400 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2401 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2402
2403 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2404 memset(desc, 0, Q_DESC_SIZE);
2405
2406 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2407 CCP_CMD_INIT(desc) = 1;
2408 CCP_CMD_FUNCTION(desc) = function.raw;
2409
2410 CCP_CMD_LEN(desc) = session->auth.aad_length;
2411
2412 CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2413 CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2414 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2415
2416 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2417 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2418 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2419
2420 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2421
2422 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2423 rte_wmb();
2424
2425 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2426 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2427 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2428 cmd_q->qcontrol | CMD_Q_RUN);
2429
2430 /* CMD3 : GCTR Plain text */
2431 function.raw = 0;
2432 CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2433 CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2434 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2435 if (non_align_len == 0)
2436 CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2437 else
2438 CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2439
2440
2441 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2442 memset(desc, 0, Q_DESC_SIZE);
2443
2444 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2445 CCP_CMD_EOM(desc) = 1;
2446 CCP_CMD_FUNCTION(desc) = function.raw;
2447
2448 CCP_CMD_LEN(desc) = length;
2449
2450 CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2451 CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2452 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2453
2454 CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2455 CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2456 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2457
2458 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2459 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2460 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2461
2462 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2463
2464 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2465 rte_wmb();
2466
2467 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2468 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2469 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2470 cmd_q->qcontrol | CMD_Q_RUN);
2471
2472 /* CMD4 : PT to copy IV */
2473 pst.src_addr = session->cipher.nonce_phys;
2474 pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2475 pst.len = AES_BLOCK_SIZE;
2476 pst.dir = 1;
2477 pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2478 pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2479 ccp_perform_passthru(&pst, cmd_q);
2480
2481 /* CMD5 : GHASH-Final */
2482 function.raw = 0;
2483 CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2484 CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2485 CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2486
2487 desc = &cmd_q->qbase_desc[cmd_q->qidx];
2488 memset(desc, 0, Q_DESC_SIZE);
2489
2490 CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2491 CCP_CMD_FUNCTION(desc) = function.raw;
2492 /* Last block (AAD_len || PT_len)*/
2493 CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2494
2495 CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2496 CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2497 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2498
2499 CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2500 CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
2501 CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2502
2503 CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2504 CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2505 CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2506
2507 CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2508
2509 cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2510 rte_wmb();
2511
2512 tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2513 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2514 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2515 cmd_q->qcontrol | CMD_Q_RUN);
2516
2517 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2518 return 0;
2519 }
2520
2521 static inline int
ccp_crypto_cipher(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2522 ccp_crypto_cipher(struct rte_crypto_op *op,
2523 struct ccp_queue *cmd_q,
2524 struct ccp_batch_info *b_info)
2525 {
2526 int result = 0;
2527 struct ccp_session *session;
2528
2529 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2530
2531 switch (session->cipher.algo) {
2532 case CCP_CIPHER_ALGO_AES_CBC:
2533 result = ccp_perform_aes(op, cmd_q, b_info);
2534 b_info->desccnt += 2;
2535 break;
2536 case CCP_CIPHER_ALGO_AES_CTR:
2537 result = ccp_perform_aes(op, cmd_q, b_info);
2538 b_info->desccnt += 2;
2539 break;
2540 case CCP_CIPHER_ALGO_AES_ECB:
2541 result = ccp_perform_aes(op, cmd_q, b_info);
2542 b_info->desccnt += 1;
2543 break;
2544 case CCP_CIPHER_ALGO_3DES_CBC:
2545 result = ccp_perform_3des(op, cmd_q, b_info);
2546 b_info->desccnt += 2;
2547 break;
2548 default:
2549 CCP_LOG_ERR("Unsupported cipher algo %d",
2550 session->cipher.algo);
2551 return -ENOTSUP;
2552 }
2553 return result;
2554 }
2555
2556 static inline int
ccp_crypto_auth(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2557 ccp_crypto_auth(struct rte_crypto_op *op,
2558 struct ccp_queue *cmd_q,
2559 struct ccp_batch_info *b_info)
2560 {
2561
2562 int result = 0;
2563 struct ccp_session *session;
2564
2565 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2566
2567 switch (session->auth.algo) {
2568 case CCP_AUTH_ALGO_SHA1:
2569 case CCP_AUTH_ALGO_SHA224:
2570 case CCP_AUTH_ALGO_SHA256:
2571 case CCP_AUTH_ALGO_SHA384:
2572 case CCP_AUTH_ALGO_SHA512:
2573 result = ccp_perform_sha(op, cmd_q);
2574 b_info->desccnt += 3;
2575 break;
2576 case CCP_AUTH_ALGO_MD5_HMAC:
2577 if (session->auth_opt == 0)
2578 result = -1;
2579 break;
2580 case CCP_AUTH_ALGO_SHA1_HMAC:
2581 case CCP_AUTH_ALGO_SHA224_HMAC:
2582 case CCP_AUTH_ALGO_SHA256_HMAC:
2583 if (session->auth_opt == 0) {
2584 result = ccp_perform_hmac(op, cmd_q);
2585 b_info->desccnt += 6;
2586 }
2587 break;
2588 case CCP_AUTH_ALGO_SHA384_HMAC:
2589 case CCP_AUTH_ALGO_SHA512_HMAC:
2590 if (session->auth_opt == 0) {
2591 result = ccp_perform_hmac(op, cmd_q);
2592 b_info->desccnt += 7;
2593 }
2594 break;
2595 case CCP_AUTH_ALGO_SHA3_224:
2596 case CCP_AUTH_ALGO_SHA3_256:
2597 case CCP_AUTH_ALGO_SHA3_384:
2598 case CCP_AUTH_ALGO_SHA3_512:
2599 result = ccp_perform_sha3(op, cmd_q);
2600 b_info->desccnt += 1;
2601 break;
2602 case CCP_AUTH_ALGO_SHA3_224_HMAC:
2603 case CCP_AUTH_ALGO_SHA3_256_HMAC:
2604 result = ccp_perform_sha3_hmac(op, cmd_q);
2605 b_info->desccnt += 3;
2606 break;
2607 case CCP_AUTH_ALGO_SHA3_384_HMAC:
2608 case CCP_AUTH_ALGO_SHA3_512_HMAC:
2609 result = ccp_perform_sha3_hmac(op, cmd_q);
2610 b_info->desccnt += 4;
2611 break;
2612 case CCP_AUTH_ALGO_AES_CMAC:
2613 result = ccp_perform_aes_cmac(op, cmd_q);
2614 b_info->desccnt += 4;
2615 break;
2616 default:
2617 CCP_LOG_ERR("Unsupported auth algo %d",
2618 session->auth.algo);
2619 return -ENOTSUP;
2620 }
2621
2622 return result;
2623 }
2624
2625 static inline int
ccp_crypto_aead(struct rte_crypto_op * op,struct ccp_queue * cmd_q,struct ccp_batch_info * b_info)2626 ccp_crypto_aead(struct rte_crypto_op *op,
2627 struct ccp_queue *cmd_q,
2628 struct ccp_batch_info *b_info)
2629 {
2630 int result = 0;
2631 struct ccp_session *session;
2632
2633 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2634
2635 switch (session->auth.algo) {
2636 case CCP_AUTH_ALGO_AES_GCM:
2637 if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2638 CCP_LOG_ERR("Incorrect chain order");
2639 return -1;
2640 }
2641 result = ccp_perform_aes_gcm(op, cmd_q);
2642 b_info->desccnt += 5;
2643 break;
2644 default:
2645 CCP_LOG_ERR("Unsupported aead algo %d",
2646 session->aead_algo);
2647 return -ENOTSUP;
2648 }
2649 return result;
2650 }
2651
2652 int
process_ops_to_enqueue(struct ccp_qp * qp,struct rte_crypto_op ** op,struct ccp_queue * cmd_q,uint16_t nb_ops,uint16_t total_nb_ops,int slots_req,uint16_t b_idx)2653 process_ops_to_enqueue(struct ccp_qp *qp,
2654 struct rte_crypto_op **op,
2655 struct ccp_queue *cmd_q,
2656 uint16_t nb_ops,
2657 uint16_t total_nb_ops,
2658 int slots_req,
2659 uint16_t b_idx)
2660 {
2661 int i, result = 0;
2662 struct ccp_batch_info *b_info;
2663 struct ccp_session *session;
2664 EVP_MD_CTX *auth_ctx = NULL;
2665
2666 if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
2667 CCP_LOG_ERR("batch info allocation failed");
2668 return 0;
2669 }
2670
2671 auth_ctx = EVP_MD_CTX_create();
2672 if (unlikely(!auth_ctx)) {
2673 CCP_LOG_ERR("Unable to create auth ctx");
2674 return 0;
2675 }
2676 b_info->auth_ctr = 0;
2677
2678 /* populate batch info necessary for dequeue */
2679 b_info->op_idx = 0;
2680 b_info->b_idx = 0;
2681 b_info->lsb_buf_idx = 0;
2682 b_info->desccnt = 0;
2683 b_info->cmd_q = cmd_q;
2684 b_info->lsb_buf_phys = (phys_addr_t)rte_mem_virt2iova((void *)b_info->lsb_buf);
2685
2686 rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
2687
2688 b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2689 Q_DESC_SIZE);
2690 for (i = b_idx; i < (nb_ops+b_idx); i++) {
2691 session = CRYPTODEV_GET_SYM_SESS_PRIV(op[i]->sym->session);
2692 switch (session->cmd_id) {
2693 case CCP_CMD_CIPHER:
2694 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2695 break;
2696 case CCP_CMD_AUTH:
2697 if (session->auth_opt) {
2698 b_info->auth_ctr++;
2699 result = cpu_crypto_auth(qp, op[i],
2700 session, auth_ctx);
2701 } else
2702 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2703 break;
2704 case CCP_CMD_CIPHER_HASH:
2705 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2706 if (result)
2707 break;
2708 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2709 break;
2710 case CCP_CMD_HASH_CIPHER:
2711 if (session->auth_opt) {
2712 result = cpu_crypto_auth(qp, op[i],
2713 session, auth_ctx);
2714 if (op[i]->status !=
2715 RTE_CRYPTO_OP_STATUS_SUCCESS)
2716 CCP_LOG_ERR("RTE_CRYPTO_OP_STATUS_AUTH_FAILED");
2717 } else
2718 result = ccp_crypto_auth(op[i], cmd_q, b_info);
2719
2720 if (result)
2721 break;
2722 result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2723 break;
2724 case CCP_CMD_COMBINED:
2725 result = ccp_crypto_aead(op[i], cmd_q, b_info);
2726 break;
2727 default:
2728 CCP_LOG_ERR("Unsupported cmd_id");
2729 result = -1;
2730 }
2731 if (unlikely(result < 0)) {
2732 rte_atomic64_add(&b_info->cmd_q->free_slots,
2733 (slots_req - b_info->desccnt));
2734 break;
2735 }
2736 b_info->op[i] = op[i];
2737 }
2738
2739 b_info->opcnt = i;
2740 b_info->b_idx = b_idx;
2741 b_info->total_nb_ops = total_nb_ops;
2742 b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2743 Q_DESC_SIZE);
2744
2745 rte_wmb();
2746 /* Write the new tail address back to the queue register */
2747 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
2748 b_info->tail_offset);
2749 /* Turn the queue back on using our cached control register */
2750 CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2751 cmd_q->qcontrol | CMD_Q_RUN);
2752
2753 rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
2754
2755 EVP_MD_CTX_destroy(auth_ctx);
2756 return i-b_idx;
2757 }
2758
ccp_auth_dq_prepare(struct rte_crypto_op * op)2759 static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
2760 {
2761 struct ccp_session *session;
2762 uint8_t *digest_data, *addr;
2763 struct rte_mbuf *m_last;
2764 int offset, digest_offset;
2765 uint8_t digest_le[64];
2766
2767 session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2768
2769 if (session->cmd_id == CCP_CMD_COMBINED) {
2770 digest_data = op->sym->aead.digest.data;
2771 digest_offset = op->sym->aead.data.offset +
2772 op->sym->aead.data.length;
2773 } else {
2774 digest_data = op->sym->auth.digest.data;
2775 digest_offset = op->sym->auth.data.offset +
2776 op->sym->auth.data.length;
2777 }
2778 m_last = rte_pktmbuf_lastseg(op->sym->m_src);
2779 addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
2780 m_last->data_len - session->auth.ctx_len);
2781
2782 rte_mb();
2783 offset = session->auth.offset;
2784
2785 if (session->auth.engine == CCP_ENGINE_SHA)
2786 if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
2787 (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
2788 (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
2789 /* All other algorithms require byte
2790 * swap done by host
2791 */
2792 unsigned int i;
2793
2794 offset = session->auth.ctx_len -
2795 session->auth.offset - 1;
2796 for (i = 0; i < session->auth.digest_length; i++)
2797 digest_le[i] = addr[offset - i];
2798 offset = 0;
2799 addr = digest_le;
2800 }
2801
2802 op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
2803 if (session->auth.op == CCP_AUTH_OP_VERIFY) {
2804 if (memcmp(addr + offset, digest_data,
2805 session->auth.digest_length) != 0)
2806 op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
2807
2808 } else {
2809 if (unlikely(digest_data == 0))
2810 digest_data = rte_pktmbuf_mtod_offset(
2811 op->sym->m_dst, uint8_t *,
2812 digest_offset);
2813 rte_memcpy(digest_data, addr + offset,
2814 session->auth.digest_length);
2815 }
2816 /* Trim area used for digest from mbuf. */
2817 rte_pktmbuf_trim(op->sym->m_src,
2818 session->auth.ctx_len);
2819 }
2820
/*
 * Fill @op_d with up to @nb_ops completed ops from batch @b_info,
 * running digest post-processing (or CPU auth for auth_opt sessions)
 * per op. b_info->op_idx tracks progress so a batch can be drained
 * across multiple calls. Returns the number of ops delivered.
 */
static int
ccp_prepare_ops(struct ccp_qp *qp,
		struct rte_crypto_op **op_d,
		struct ccp_batch_info *b_info,
		uint16_t nb_ops)
{
	int i, min_ops;
	struct ccp_session *session;

	EVP_MD_CTX *auth_ctx = NULL;

	auth_ctx = EVP_MD_CTX_create();
	if (unlikely(!auth_ctx)) {
		CCP_LOG_ERR("Unable to create auth ctx");
		return 0;
	}
	/* Deliver no more than the caller asked for or the batch holds. */
	min_ops = RTE_MIN(nb_ops, b_info->opcnt);

	for (i = b_info->b_idx; i < min_ops; i++) {
		/* op_idx resumes where a previous partial dequeue stopped. */
		op_d[i] = b_info->op[b_info->b_idx + b_info->op_idx++];
		session = CRYPTODEV_GET_SYM_SESS_PRIV(op_d[i]->sym->session);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			break;
		case CCP_CMD_AUTH:
			if (session->auth_opt == 0)
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_CIPHER_HASH:
			if (session->auth_opt)
				cpu_crypto_auth(qp, op_d[i],
						session, auth_ctx);
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_HASH_CIPHER:
			if (session->auth_opt)
				op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_COMBINED:
			ccp_auth_dq_prepare(op_d[i]);
			break;
		default:
			CCP_LOG_ERR("Unsupported cmd_id");
		}
	}

	EVP_MD_CTX_destroy(auth_ctx);
	b_info->opcnt -= min_ops;
	return min_ops;
}
2875
/*
 * Dequeue completed ops for a queue pair. Picks up either a partially
 * drained batch cached in qp->b_info or the next batch from the
 * processed ring, checks (via the hardware head pointer) whether the
 * batch's descriptor span has fully executed, and if so hands the ops
 * to ccp_prepare_ops. Returns the number of ops delivered (0 if the
 * hardware is still processing the batch).
 */
int
process_ops_to_dequeue(struct ccp_qp *qp,
		       struct rte_crypto_op **op,
		       uint16_t nb_ops,
		       uint16_t *total_nb_ops)
{
	struct ccp_batch_info *b_info;
	uint32_t cur_head_offset;

	if (qp->b_info != NULL) {
		b_info = qp->b_info;
		/* A partially drained batch needs no completion check. */
		if (unlikely(b_info->op_idx > 0))
			goto success;
	} else if (rte_ring_dequeue(qp->processed_pkts,
				    (void **)&b_info))
		return 0;

	/* All ops handled on the CPU (auth_opt): nothing to wait for. */
	if (b_info->auth_ctr == b_info->opcnt)
		goto success;
	*total_nb_ops = b_info->total_nb_ops;
	cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
				       CMD_Q_HEAD_LO_BASE);

	/* The batch is done only when the hardware head pointer has left
	 * the [head_offset, tail_offset) span; the two branches handle
	 * the non-wrapped and ring-wrapped cases respectively.
	 */
	if (b_info->head_offset < b_info->tail_offset) {
		if ((cur_head_offset >= b_info->head_offset) &&
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	} else if (b_info->tail_offset != b_info->head_offset) {
		if ((cur_head_offset >= b_info->head_offset) ||
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	}


success:
	*total_nb_ops = b_info->total_nb_ops;
	nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
	rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
	b_info->desccnt = 0;
	/* Keep the batch cached if ops remain; otherwise recycle it. */
	if (b_info->opcnt > 0) {
		qp->b_info = b_info;
	} else {
		rte_mempool_put(qp->batch_mp, (void *)b_info);
		qp->b_info = NULL;
	}

	return nb_ops;
}
2928