xref: /dpdk/drivers/crypto/ccp/ccp_crypto.c (revision 665b49c51639a10c553433bc2bcd85c7331c631e)
1 /*   SPDX-License-Identifier: BSD-3-Clause
2  *   Copyright(c) 2018 Advanced Micro Devices, Inc. All rights reserved.
3  */
4 
5 #define OPENSSL_API_COMPAT 0x10100000L
6 
7 #include <dirent.h>
8 #include <fcntl.h>
9 #include <stdio.h>
10 #include <string.h>
11 #include <sys/mman.h>
12 #include <sys/queue.h>
13 #include <sys/types.h>
14 #include <unistd.h>
15 #include <openssl/sha.h>
16 #include <openssl/cmac.h> /*sub key apis*/
17 #include <openssl/evp.h> /*sub key apis*/
18 
19 #include <rte_hexdump.h>
20 #include <rte_memzone.h>
21 #include <rte_malloc.h>
22 #include <rte_memory.h>
23 #include <rte_spinlock.h>
24 #include <rte_string_fns.h>
25 #include <cryptodev_pmd.h>
26 
27 #include "ccp_dev.h"
28 #include "ccp_crypto.h"
29 #include "ccp_pci.h"
30 #include "ccp_pmd_private.h"
31 
32 #include <openssl/conf.h>
33 #include <openssl/err.h>
34 #include <openssl/hmac.h>
35 
/* SHA-1/2 initial hash values (the FIPS 180-4 H constants).
 * Each table lists the words high-index first (H7..H0 / H4..H0);
 * presumably this matches the word order the CCP engine loads a
 * SHA context in -- confirm against the CCP context layout.
 * The SHA-1 table is zero-padded up to SHA_COMMON_DIGEST_SIZE.
 */
uint32_t ccp_sha1_init[SHA_COMMON_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA1_H4, SHA1_H3,
	SHA1_H2, SHA1_H1,
	SHA1_H0, 0x0U,
	0x0U, 0x0U,
};

uint32_t ccp_sha224_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA224_H7, SHA224_H6,
	SHA224_H5, SHA224_H4,
	SHA224_H3, SHA224_H2,
	SHA224_H1, SHA224_H0,
};

uint32_t ccp_sha256_init[SHA256_DIGEST_SIZE / sizeof(uint32_t)] = {
	SHA256_H7, SHA256_H6,
	SHA256_H5, SHA256_H4,
	SHA256_H3, SHA256_H2,
	SHA256_H1, SHA256_H0,
};

uint64_t ccp_sha384_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA384_H7, SHA384_H6,
	SHA384_H5, SHA384_H4,
	SHA384_H3, SHA384_H2,
	SHA384_H1, SHA384_H0,
};

uint64_t ccp_sha512_init[SHA512_DIGEST_SIZE / sizeof(uint64_t)] = {
	SHA512_H7, SHA512_H6,
	SHA512_H5, SHA512_H4,
	SHA512_H3, SHA512_H2,
	SHA512_H1, SHA512_H0,
};
71 
/* SHA3_CONST: spell 64-bit round constants portably (MSVC takes the
 * literal as-is; elsewhere append the L suffix).
 */
#if defined(_MSC_VER)
#define SHA3_CONST(x) x
#else
#define SHA3_CONST(x) x##L
#endif

/** 'Words' here refers to uint64_t: the 1600-bit Keccak state is 25 words */
#define SHA3_KECCAK_SPONGE_WORDS \
	(((1600) / 8) / sizeof(uint64_t))
/* Incremental SHA-3 (Keccak sponge) hashing state. */
typedef struct sha3_context_ {
	uint64_t saved;
	/**
	 * The portion of the input message that we
	 * didn't consume yet (partial word, filled low byte first)
	 */
	union {
		uint64_t s[SHA3_KECCAK_SPONGE_WORDS];
		/* Keccak's state as 25 64-bit words */
		uint8_t sb[SHA3_KECCAK_SPONGE_WORDS * 8];
		/**total 200 ctx size: same state viewed as raw bytes**/
	};
	unsigned int byteIndex;
	/**
	 * 0..7--the next byte after the set one
	 * (starts from 0; 0--none are buffered)
	 */
	unsigned int wordIndex;
	/**
	 * 0..24--the next word to integrate input
	 * (starts from 0)
	 */
	unsigned int capacityWords;
	/**
	 * the double size of the hash output in
	 * words (e.g. 16 for Keccak 512)
	 */
} sha3_context;

/* 64-bit rotate-left; y must be 1..63 (a 0 or 64 shift would be UB) */
#ifndef SHA3_ROTL64
#define SHA3_ROTL64(x, y) \
	(((x) << (y)) | ((x) >> ((sizeof(uint64_t)*8) - (y))))
#endif
114 
/* Iota-step round constants for Keccak-f[1600], one per round (FIPS 202) */
static const uint64_t keccakf_rndc[24] = {
	SHA3_CONST(0x0000000000000001UL), SHA3_CONST(0x0000000000008082UL),
	SHA3_CONST(0x800000000000808aUL), SHA3_CONST(0x8000000080008000UL),
	SHA3_CONST(0x000000000000808bUL), SHA3_CONST(0x0000000080000001UL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008009UL),
	SHA3_CONST(0x000000000000008aUL), SHA3_CONST(0x0000000000000088UL),
	SHA3_CONST(0x0000000080008009UL), SHA3_CONST(0x000000008000000aUL),
	SHA3_CONST(0x000000008000808bUL), SHA3_CONST(0x800000000000008bUL),
	SHA3_CONST(0x8000000000008089UL), SHA3_CONST(0x8000000000008003UL),
	SHA3_CONST(0x8000000000008002UL), SHA3_CONST(0x8000000000000080UL),
	SHA3_CONST(0x000000000000800aUL), SHA3_CONST(0x800000008000000aUL),
	SHA3_CONST(0x8000000080008081UL), SHA3_CONST(0x8000000000008080UL),
	SHA3_CONST(0x0000000080000001UL), SHA3_CONST(0x8000000080008008UL)
};

/* Rho-step rotation offsets, indexed in Pi traversal order */
static const unsigned int keccakf_rotc[24] = {
	1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62,
	18, 39, 61, 20, 44
};

/* Pi-step lane permutation: destination index for each step */
static const unsigned int keccakf_piln[24] = {
	10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20,
	14, 22, 9, 6, 1
};
139 
140 static enum ccp_cmd_order
141 ccp_get_cmd_id(const struct rte_crypto_sym_xform *xform)
142 {
143 	enum ccp_cmd_order res = CCP_CMD_NOT_SUPPORTED;
144 
145 	if (xform == NULL)
146 		return res;
147 	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
148 		if (xform->next == NULL)
149 			return CCP_CMD_AUTH;
150 		else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
151 			return CCP_CMD_HASH_CIPHER;
152 	}
153 	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
154 		if (xform->next == NULL)
155 			return CCP_CMD_CIPHER;
156 		else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
157 			return CCP_CMD_CIPHER_HASH;
158 	}
159 	if (xform->type == RTE_CRYPTO_SYM_XFORM_AEAD)
160 		return CCP_CMD_COMBINED;
161 	return res;
162 }
163 
164 /* partial hash using openssl */
165 static int partial_hash_sha1(uint8_t *data_in, uint8_t *data_out)
166 {
167 	SHA_CTX ctx;
168 
169 	if (!SHA1_Init(&ctx))
170 		return -EFAULT;
171 	SHA1_Transform(&ctx, data_in);
172 	rte_memcpy(data_out, &ctx, SHA_DIGEST_LENGTH);
173 	return 0;
174 }
175 
176 static int partial_hash_sha224(uint8_t *data_in, uint8_t *data_out)
177 {
178 	SHA256_CTX ctx;
179 
180 	if (!SHA224_Init(&ctx))
181 		return -EFAULT;
182 	SHA256_Transform(&ctx, data_in);
183 	rte_memcpy(data_out, &ctx,
184 		   SHA256_DIGEST_LENGTH);
185 	return 0;
186 }
187 
188 static int partial_hash_sha256(uint8_t *data_in, uint8_t *data_out)
189 {
190 	SHA256_CTX ctx;
191 
192 	if (!SHA256_Init(&ctx))
193 		return -EFAULT;
194 	SHA256_Transform(&ctx, data_in);
195 	rte_memcpy(data_out, &ctx,
196 		   SHA256_DIGEST_LENGTH);
197 	return 0;
198 }
199 
200 static int partial_hash_sha384(uint8_t *data_in, uint8_t *data_out)
201 {
202 	SHA512_CTX ctx;
203 
204 	if (!SHA384_Init(&ctx))
205 		return -EFAULT;
206 	SHA512_Transform(&ctx, data_in);
207 	rte_memcpy(data_out, &ctx,
208 		   SHA512_DIGEST_LENGTH);
209 	return 0;
210 }
211 
212 static int partial_hash_sha512(uint8_t *data_in, uint8_t *data_out)
213 {
214 	SHA512_CTX ctx;
215 
216 	if (!SHA512_Init(&ctx))
217 		return -EFAULT;
218 	SHA512_Transform(&ctx, data_in);
219 	rte_memcpy(data_out, &ctx,
220 		   SHA512_DIGEST_LENGTH);
221 	return 0;
222 }
223 
/*
 * Keccak-f[1600] permutation (FIPS 202), applied in place to the
 * 25-word sponge state.  The tables keccakf_rndc / keccakf_rotc /
 * keccakf_piln above supply the round constants, rotation offsets
 * and Pi lane order.
 */
static void
keccakf(uint64_t s[25])
{
	int i, j, round;
	uint64_t t, bc[5];
#define KECCAK_ROUNDS 24

	for (round = 0; round < KECCAK_ROUNDS; round++) {

		/* Theta: fold column parities back into every lane */
		for (i = 0; i < 5; i++)
			bc[i] = s[i] ^ s[i + 5] ^ s[i + 10] ^ s[i + 15] ^
				s[i + 20];

		for (i = 0; i < 5; i++) {
			t = bc[(i + 4) % 5] ^ SHA3_ROTL64(bc[(i + 1) % 5], 1);
			for (j = 0; j < 25; j += 5)
				s[j + i] ^= t;
		}

		/* Rho Pi: rotate each lane and permute lane positions */
		t = s[1];
		for (i = 0; i < 24; i++) {
			j = keccakf_piln[i];
			bc[0] = s[j];
			s[j] = SHA3_ROTL64(t, keccakf_rotc[i]);
			t = bc[0];
		}

		/* Chi: the only non-linear step, row-wise */
		for (j = 0; j < 25; j += 5) {
			for (i = 0; i < 5; i++)
				bc[i] = s[j + i];
			for (i = 0; i < 5; i++)
				s[j + i] ^= (~bc[(i + 1) % 5]) &
					    bc[(i + 2) % 5];
		}

		/* Iota: inject the round constant */
		s[0] ^= keccakf_rndc[round];
	}
}
266 
267 static void
268 sha3_Init224(void *priv)
269 {
270 	sha3_context *ctx = (sha3_context *) priv;
271 
272 	memset(ctx, 0, sizeof(*ctx));
273 	ctx->capacityWords = 2 * 224 / (8 * sizeof(uint64_t));
274 }
275 
276 static void
277 sha3_Init256(void *priv)
278 {
279 	sha3_context *ctx = (sha3_context *) priv;
280 
281 	memset(ctx, 0, sizeof(*ctx));
282 	ctx->capacityWords = 2 * 256 / (8 * sizeof(uint64_t));
283 }
284 
285 static void
286 sha3_Init384(void *priv)
287 {
288 	sha3_context *ctx = (sha3_context *) priv;
289 
290 	memset(ctx, 0, sizeof(*ctx));
291 	ctx->capacityWords = 2 * 384 / (8 * sizeof(uint64_t));
292 }
293 
294 static void
295 sha3_Init512(void *priv)
296 {
297 	sha3_context *ctx = (sha3_context *) priv;
298 
299 	memset(ctx, 0, sizeof(*ctx));
300 	ctx->capacityWords = 2 * 512 / (8 * sizeof(uint64_t));
301 }
302 
303 
/* Absorb 'len' bytes of message into the Keccak sponge.  Partial
 * words are accumulated (low byte first) in ctx->saved until eight
 * bytes are available; whenever a full rate's worth of words has
 * been XORed in, keccakf() is run.  Padding is NOT applied here --
 * this is the plain incremental update step.
 */
static void
sha3_Update(void *priv, void const *bufIn, size_t len)
{
	sha3_context *ctx = (sha3_context *) priv;
	unsigned int old_tail = (8 - ctx->byteIndex) & 7;
	size_t words;
	unsigned int tail;
	size_t i;
	const uint8_t *buf = bufIn;

	/* Too little input to complete the buffered word: stash and exit */
	if (len < old_tail) {
		while (len--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				      ((ctx->byteIndex++) * 8);
		return;
	}

	/* First finish the partially-filled word and absorb it */
	if (old_tail) {
		len -= old_tail;
		while (old_tail--)
			ctx->saved |= (uint64_t) (*(buf++)) <<
				      ((ctx->byteIndex++) * 8);

		ctx->s[ctx->wordIndex] ^= ctx->saved;
		ctx->byteIndex = 0;
		ctx->saved = 0;
		if (++ctx->wordIndex ==
		   (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* Bulk-absorb whole 64-bit words, reading bytes low-to-high */
	words = len / sizeof(uint64_t);
	tail = len - words * sizeof(uint64_t);

	for (i = 0; i < words; i++, buf += sizeof(uint64_t)) {
		const uint64_t t = (uint64_t) (buf[0]) |
			((uint64_t) (buf[1]) << 8 * 1) |
			((uint64_t) (buf[2]) << 8 * 2) |
			((uint64_t) (buf[3]) << 8 * 3) |
			((uint64_t) (buf[4]) << 8 * 4) |
			((uint64_t) (buf[5]) << 8 * 5) |
			((uint64_t) (buf[6]) << 8 * 6) |
			((uint64_t) (buf[7]) << 8 * 7);
		ctx->s[ctx->wordIndex] ^= t;
		if (++ctx->wordIndex ==
		   (SHA3_KECCAK_SPONGE_WORDS - ctx->capacityWords)) {
			keccakf(ctx->s);
			ctx->wordIndex = 0;
		}
	}

	/* Buffer any trailing bytes (< 8) for the next call */
	while (tail--)
		ctx->saved |= (uint64_t) (*(buf++)) << ((ctx->byteIndex++) * 8);
}
364 
365 int partial_hash_sha3_224(uint8_t *data_in, uint8_t *data_out)
366 {
367 	sha3_context *ctx;
368 	int i;
369 
370 	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
371 	if (!ctx) {
372 		CCP_LOG_ERR("sha3-ctx creation failed");
373 		return -ENOMEM;
374 	}
375 	sha3_Init224(ctx);
376 	sha3_Update(ctx, data_in, SHA3_224_BLOCK_SIZE);
377 	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
378 		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
379 	rte_free(ctx);
380 
381 	return 0;
382 }
383 
384 int partial_hash_sha3_256(uint8_t *data_in, uint8_t *data_out)
385 {
386 	sha3_context *ctx;
387 	int i;
388 
389 	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
390 	if (!ctx) {
391 		CCP_LOG_ERR("sha3-ctx creation failed");
392 		return -ENOMEM;
393 	}
394 	sha3_Init256(ctx);
395 	sha3_Update(ctx, data_in, SHA3_256_BLOCK_SIZE);
396 	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
397 		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
398 	rte_free(ctx);
399 
400 	return 0;
401 }
402 
403 int partial_hash_sha3_384(uint8_t *data_in, uint8_t *data_out)
404 {
405 	sha3_context *ctx;
406 	int i;
407 
408 	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
409 	if (!ctx) {
410 		CCP_LOG_ERR("sha3-ctx creation failed");
411 		return -ENOMEM;
412 	}
413 	sha3_Init384(ctx);
414 	sha3_Update(ctx, data_in, SHA3_384_BLOCK_SIZE);
415 	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
416 		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
417 	rte_free(ctx);
418 
419 	return 0;
420 }
421 
422 int partial_hash_sha3_512(uint8_t *data_in, uint8_t *data_out)
423 {
424 	sha3_context *ctx;
425 	int i;
426 
427 	ctx = rte_zmalloc("sha3-ctx", sizeof(sha3_context), 0);
428 	if (!ctx) {
429 		CCP_LOG_ERR("sha3-ctx creation failed");
430 		return -ENOMEM;
431 	}
432 	sha3_Init512(ctx);
433 	sha3_Update(ctx, data_in, SHA3_512_BLOCK_SIZE);
434 	for (i = 0; i < CCP_SHA3_CTX_SIZE; i++, data_out++)
435 		*data_out = ctx->sb[CCP_SHA3_CTX_SIZE - i - 1];
436 	rte_free(ctx);
437 
438 	return 0;
439 }
440 
/*
 * Precompute the HMAC ipad/opad partial hashes for the session's
 * auth algorithm: the ipad state is stored at sess->auth.pre_compute
 * and the opad state at sess->auth.pre_compute + sess->auth.ctx_len.
 * SHA-1/2 state words are stored reversed; the SHA-3 helpers already
 * emit the byte-reversed 200-byte sponge state.
 * Assumes the key was zero-padded to the algorithm's block size by
 * the caller.  Returns 0 on success, -1 on any failure.
 */
static int generate_partial_hash(struct ccp_session *sess)
{

	uint8_t ipad[sess->auth.block_size];
	uint8_t	opad[sess->auth.block_size];
	uint8_t *ipad_t, *opad_t;
	uint32_t *hash_value_be32, hash_temp32[8];
	uint64_t *hash_value_be64, hash_temp64[8];
	int i, count;
	uint8_t *hash_value_sha3;

	opad_t = ipad_t = (uint8_t *)sess->auth.key;

	hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute);
	hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute);

	/* considering key size is always equal to block size of algorithm */
	for (i = 0; i < sess->auth.block_size; i++) {
		ipad[i] = (ipad_t[i] ^ HMAC_IPAD_VALUE);
		opad[i] = (opad_t[i] ^ HMAC_OPAD_VALUE);
	}

	switch (sess->auth.algo) {
	case CCP_AUTH_ALGO_SHA1_HMAC:
		count = SHA1_DIGEST_SIZE >> 2;

		if (partial_hash_sha1(ipad, (uint8_t *)hash_temp32))
			return -1;
		/* state words are stored reversed for the engine */
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha1(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA224_HMAC:
		count = SHA256_DIGEST_SIZE >> 2;

		if (partial_hash_sha224(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha224(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_224_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_224(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha3_224(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA256_HMAC:
		count = SHA256_DIGEST_SIZE >> 2;

		if (partial_hash_sha256(ipad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];

		hash_value_be32 = (uint32_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha256(opad, (uint8_t *)hash_temp32))
			return -1;
		for (i = 0; i < count; i++, hash_value_be32++)
			*hash_value_be32 = hash_temp32[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_256_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_256(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_256(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA384_HMAC:
		count = SHA512_DIGEST_SIZE >> 3;

		if (partial_hash_sha384(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha384(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_384_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_384(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_384(opad, hash_value_sha3))
			return -1;
		return 0;
	case CCP_AUTH_ALGO_SHA512_HMAC:
		count = SHA512_DIGEST_SIZE >> 3;

		if (partial_hash_sha512(ipad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];

		hash_value_be64 = (uint64_t *)((uint8_t *)sess->auth.pre_compute
					       + sess->auth.ctx_len);
		if (partial_hash_sha512(opad, (uint8_t *)hash_temp64))
			return -1;
		for (i = 0; i < count; i++, hash_value_be64++)
			*hash_value_be64 = hash_temp64[count - 1 - i];
		return 0;
	case CCP_AUTH_ALGO_SHA3_512_HMAC:
		hash_value_sha3 = sess->auth.pre_compute;
		if (partial_hash_sha3_512(ipad, hash_value_sha3))
			return -1;

		hash_value_sha3 = (uint8_t *)(sess->auth.pre_compute
					      + sess->auth.ctx_len);
		if (partial_hash_sha3_512(opad, hash_value_sha3))
			return -1;
		return 0;
	default:
		CCP_LOG_ERR("Invalid auth algo");
		return -1;
	}
}
584 
/*
 * Derive a CMAC subkey (NIST SP 800-38B doubling step):
 * k = l << 1, and if the MSB of l was set, XOR the field constant
 * Rb into the last byte (0x87 for 16-byte blocks, 0x1b for 8-byte).
 */
static void prepare_key(unsigned char *k, unsigned char *l, int bl)
{
	int pos;
	unsigned int carry = 0;

	/* Shift the whole block left one bit, propagating carries
	 * upward from the least-significant byte.
	 */
	for (pos = bl - 1; pos >= 0; pos--) {
		unsigned int shifted = ((unsigned int)l[pos] << 1) | carry;

		carry = (shifted >> 8) & 1u;
		k[pos] = (unsigned char)shifted;
	}
	/* Conditional reduction when the shifted-out bit was 1 */
	if (l[0] & 0x80)
		k[bl - 1] ^= bl == 16 ? 0x87 : 0x1b;
}
599 
600 /* subkeys K1 and K2 generation for CMAC */
601 static int
602 generate_cmac_subkeys(struct ccp_session *sess)
603 {
604 	const EVP_CIPHER *algo;
605 	EVP_CIPHER_CTX *ctx;
606 	unsigned char *ccp_ctx;
607 	size_t i;
608 	int dstlen, totlen;
609 	unsigned char zero_iv[AES_BLOCK_SIZE] = {0};
610 	unsigned char dst[2 * AES_BLOCK_SIZE] = {0};
611 	unsigned char k1[AES_BLOCK_SIZE] = {0};
612 	unsigned char k2[AES_BLOCK_SIZE] = {0};
613 
614 	if (sess->auth.ut.aes_type == CCP_AES_TYPE_128)
615 		algo =  EVP_aes_128_cbc();
616 	else if (sess->auth.ut.aes_type == CCP_AES_TYPE_192)
617 		algo =  EVP_aes_192_cbc();
618 	else if (sess->auth.ut.aes_type == CCP_AES_TYPE_256)
619 		algo =  EVP_aes_256_cbc();
620 	else {
621 		CCP_LOG_ERR("Invalid CMAC type length");
622 		return -1;
623 	}
624 
625 	ctx = EVP_CIPHER_CTX_new();
626 	if (!ctx) {
627 		CCP_LOG_ERR("ctx creation failed");
628 		return -1;
629 	}
630 	if (EVP_EncryptInit(ctx, algo, (unsigned char *)sess->auth.key,
631 			    (unsigned char *)zero_iv) <= 0)
632 		goto key_generate_err;
633 	if (EVP_CIPHER_CTX_set_padding(ctx, 0) <= 0)
634 		goto key_generate_err;
635 	if (EVP_EncryptUpdate(ctx, dst, &dstlen, zero_iv,
636 			      AES_BLOCK_SIZE) <= 0)
637 		goto key_generate_err;
638 	if (EVP_EncryptFinal_ex(ctx, dst + dstlen, &totlen) <= 0)
639 		goto key_generate_err;
640 
641 	memset(sess->auth.pre_compute, 0, CCP_SB_BYTES * 2);
642 
643 	ccp_ctx = (unsigned char *)(sess->auth.pre_compute + CCP_SB_BYTES - 1);
644 	prepare_key(k1, dst, AES_BLOCK_SIZE);
645 	for (i = 0; i < AES_BLOCK_SIZE;  i++, ccp_ctx--)
646 		*ccp_ctx = k1[i];
647 
648 	ccp_ctx = (unsigned char *)(sess->auth.pre_compute +
649 				   (2 * CCP_SB_BYTES) - 1);
650 	prepare_key(k2, k1, AES_BLOCK_SIZE);
651 	for (i = 0; i < AES_BLOCK_SIZE;  i++, ccp_ctx--)
652 		*ccp_ctx = k2[i];
653 
654 	EVP_CIPHER_CTX_free(ctx);
655 
656 	return 0;
657 
658 key_generate_err:
659 	CCP_LOG_ERR("CMAC Init failed");
660 		return -1;
661 }
662 
663 /* configure session */
664 static int
665 ccp_configure_session_cipher(struct ccp_session *sess,
666 			     const struct rte_crypto_sym_xform *xform)
667 {
668 	const struct rte_crypto_cipher_xform *cipher_xform = NULL;
669 	size_t i, j, x;
670 
671 	cipher_xform = &xform->cipher;
672 
673 	/* set cipher direction */
674 	if (cipher_xform->op ==  RTE_CRYPTO_CIPHER_OP_ENCRYPT)
675 		sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
676 	else
677 		sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
678 
679 	/* set cipher key */
680 	sess->cipher.key_length = cipher_xform->key.length;
681 	rte_memcpy(sess->cipher.key, cipher_xform->key.data,
682 		   cipher_xform->key.length);
683 
684 	/* set iv parameters */
685 	sess->iv.offset = cipher_xform->iv.offset;
686 	sess->iv.length = cipher_xform->iv.length;
687 
688 	switch (cipher_xform->algo) {
689 	case RTE_CRYPTO_CIPHER_AES_CTR:
690 		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CTR;
691 		sess->cipher.um.aes_mode = CCP_AES_MODE_CTR;
692 		sess->cipher.engine = CCP_ENGINE_AES;
693 		break;
694 	case RTE_CRYPTO_CIPHER_AES_ECB:
695 		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
696 		sess->cipher.um.aes_mode = CCP_AES_MODE_ECB;
697 		sess->cipher.engine = CCP_ENGINE_AES;
698 		break;
699 	case RTE_CRYPTO_CIPHER_AES_CBC:
700 		sess->cipher.algo = CCP_CIPHER_ALGO_AES_CBC;
701 		sess->cipher.um.aes_mode = CCP_AES_MODE_CBC;
702 		sess->cipher.engine = CCP_ENGINE_AES;
703 		break;
704 	case RTE_CRYPTO_CIPHER_3DES_CBC:
705 		sess->cipher.algo = CCP_CIPHER_ALGO_3DES_CBC;
706 		sess->cipher.um.des_mode = CCP_DES_MODE_CBC;
707 		sess->cipher.engine = CCP_ENGINE_3DES;
708 		break;
709 	default:
710 		CCP_LOG_ERR("Unsupported cipher algo");
711 		return -1;
712 	}
713 
714 
715 	switch (sess->cipher.engine) {
716 	case CCP_ENGINE_AES:
717 		if (sess->cipher.key_length == 16)
718 			sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
719 		else if (sess->cipher.key_length == 24)
720 			sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
721 		else if (sess->cipher.key_length == 32)
722 			sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
723 		else {
724 			CCP_LOG_ERR("Invalid cipher key length");
725 			return -1;
726 		}
727 		for (i = 0; i < sess->cipher.key_length ; i++)
728 			sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
729 				sess->cipher.key[i];
730 		break;
731 	case CCP_ENGINE_3DES:
732 		if (sess->cipher.key_length == 16)
733 			sess->cipher.ut.des_type = CCP_DES_TYPE_128;
734 		else if (sess->cipher.key_length == 24)
735 			sess->cipher.ut.des_type = CCP_DES_TYPE_192;
736 		else {
737 			CCP_LOG_ERR("Invalid cipher key length");
738 			return -1;
739 		}
740 		for (j = 0, x = 0; j < sess->cipher.key_length/8; j++, x += 8)
741 			for (i = 0; i < 8; i++)
742 				sess->cipher.key_ccp[(8 + x) - i - 1] =
743 					sess->cipher.key[i + x];
744 		break;
745 	default:
746 		CCP_LOG_ERR("Invalid CCP Engine");
747 		return -ENOTSUP;
748 	}
749 	sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
750 	sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
751 	return 0;
752 }
753 
754 static int
755 ccp_configure_session_auth(struct ccp_session *sess,
756 			   const struct rte_crypto_sym_xform *xform)
757 {
758 	const struct rte_crypto_auth_xform *auth_xform = NULL;
759 	size_t i;
760 
761 	auth_xform = &xform->auth;
762 
763 	sess->auth.digest_length = auth_xform->digest_length;
764 	if (auth_xform->op ==  RTE_CRYPTO_AUTH_OP_GENERATE)
765 		sess->auth.op = CCP_AUTH_OP_GENERATE;
766 	else
767 		sess->auth.op = CCP_AUTH_OP_VERIFY;
768 	switch (auth_xform->algo) {
769 	case RTE_CRYPTO_AUTH_MD5_HMAC:
770 		if (sess->auth_opt) {
771 			sess->auth.algo = CCP_AUTH_ALGO_MD5_HMAC;
772 			sess->auth.offset = ((CCP_SB_BYTES << 1) -
773 					     MD5_DIGEST_SIZE);
774 			sess->auth.key_length = auth_xform->key.length;
775 			sess->auth.block_size = MD5_BLOCK_SIZE;
776 			memset(sess->auth.key, 0, sess->auth.block_size);
777 			rte_memcpy(sess->auth.key, auth_xform->key.data,
778 				   auth_xform->key.length);
779 		} else
780 			return -1; /* HMAC MD5 not supported on CCP */
781 		break;
782 	case RTE_CRYPTO_AUTH_SHA1:
783 		sess->auth.engine = CCP_ENGINE_SHA;
784 		sess->auth.algo = CCP_AUTH_ALGO_SHA1;
785 		sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
786 		sess->auth.ctx = (void *)ccp_sha1_init;
787 		sess->auth.ctx_len = CCP_SB_BYTES;
788 		sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
789 		break;
790 	case RTE_CRYPTO_AUTH_SHA1_HMAC:
791 		if (sess->auth_opt) {
792 			if (auth_xform->key.length > SHA1_BLOCK_SIZE)
793 				return -1;
794 			sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
795 			sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
796 			sess->auth.block_size = SHA1_BLOCK_SIZE;
797 			sess->auth.key_length = auth_xform->key.length;
798 			memset(sess->auth.key, 0, sess->auth.block_size);
799 			rte_memcpy(sess->auth.key, auth_xform->key.data,
800 				   auth_xform->key.length);
801 		} else {
802 			if (auth_xform->key.length > SHA1_BLOCK_SIZE)
803 				return -1;
804 			sess->auth.engine = CCP_ENGINE_SHA;
805 			sess->auth.algo = CCP_AUTH_ALGO_SHA1_HMAC;
806 			sess->auth.ut.sha_type = CCP_SHA_TYPE_1;
807 			sess->auth.ctx_len = CCP_SB_BYTES;
808 			sess->auth.offset = CCP_SB_BYTES - SHA1_DIGEST_SIZE;
809 			sess->auth.block_size = SHA1_BLOCK_SIZE;
810 			sess->auth.key_length = auth_xform->key.length;
811 			memset(sess->auth.key, 0, sess->auth.block_size);
812 			memset(sess->auth.pre_compute, 0,
813 			       sess->auth.ctx_len << 1);
814 			rte_memcpy(sess->auth.key, auth_xform->key.data,
815 				   auth_xform->key.length);
816 			if (generate_partial_hash(sess))
817 				return -1;
818 		}
819 		break;
820 	case RTE_CRYPTO_AUTH_SHA224:
821 		sess->auth.algo = CCP_AUTH_ALGO_SHA224;
822 		sess->auth.engine = CCP_ENGINE_SHA;
823 		sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
824 		sess->auth.ctx = (void *)ccp_sha224_init;
825 		sess->auth.ctx_len = CCP_SB_BYTES;
826 		sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
827 		break;
828 	case RTE_CRYPTO_AUTH_SHA224_HMAC:
829 		if (sess->auth_opt) {
830 			if (auth_xform->key.length > SHA224_BLOCK_SIZE)
831 				return -1;
832 			sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
833 			sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
834 			sess->auth.block_size = SHA224_BLOCK_SIZE;
835 			sess->auth.key_length = auth_xform->key.length;
836 			memset(sess->auth.key, 0, sess->auth.block_size);
837 			rte_memcpy(sess->auth.key, auth_xform->key.data,
838 				   auth_xform->key.length);
839 		} else {
840 			if (auth_xform->key.length > SHA224_BLOCK_SIZE)
841 				return -1;
842 			sess->auth.algo = CCP_AUTH_ALGO_SHA224_HMAC;
843 			sess->auth.engine = CCP_ENGINE_SHA;
844 			sess->auth.ut.sha_type = CCP_SHA_TYPE_224;
845 			sess->auth.ctx_len = CCP_SB_BYTES;
846 			sess->auth.offset = CCP_SB_BYTES - SHA224_DIGEST_SIZE;
847 			sess->auth.block_size = SHA224_BLOCK_SIZE;
848 			sess->auth.key_length = auth_xform->key.length;
849 			memset(sess->auth.key, 0, sess->auth.block_size);
850 			memset(sess->auth.pre_compute, 0,
851 			       sess->auth.ctx_len << 1);
852 			rte_memcpy(sess->auth.key, auth_xform->key.data,
853 				   auth_xform->key.length);
854 			if (generate_partial_hash(sess))
855 				return -1;
856 		}
857 		break;
858 	case RTE_CRYPTO_AUTH_SHA3_224:
859 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_224;
860 		sess->auth.engine = CCP_ENGINE_SHA;
861 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
862 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
863 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
864 		break;
865 	case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
866 		if (auth_xform->key.length > SHA3_224_BLOCK_SIZE)
867 			return -1;
868 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_224_HMAC;
869 		sess->auth.engine = CCP_ENGINE_SHA;
870 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_224;
871 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
872 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA224_DIGEST_SIZE;
873 		sess->auth.block_size = SHA3_224_BLOCK_SIZE;
874 		sess->auth.key_length = auth_xform->key.length;
875 		memset(sess->auth.key, 0, sess->auth.block_size);
876 		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
877 		rte_memcpy(sess->auth.key, auth_xform->key.data,
878 			   auth_xform->key.length);
879 		if (generate_partial_hash(sess))
880 			return -1;
881 		break;
882 	case RTE_CRYPTO_AUTH_SHA256:
883 		sess->auth.algo = CCP_AUTH_ALGO_SHA256;
884 		sess->auth.engine = CCP_ENGINE_SHA;
885 		sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
886 		sess->auth.ctx = (void *)ccp_sha256_init;
887 		sess->auth.ctx_len = CCP_SB_BYTES;
888 		sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
889 		break;
890 	case RTE_CRYPTO_AUTH_SHA256_HMAC:
891 		if (sess->auth_opt) {
892 			if (auth_xform->key.length > SHA256_BLOCK_SIZE)
893 				return -1;
894 			sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
895 			sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
896 			sess->auth.block_size = SHA256_BLOCK_SIZE;
897 			sess->auth.key_length = auth_xform->key.length;
898 			memset(sess->auth.key, 0, sess->auth.block_size);
899 			rte_memcpy(sess->auth.key, auth_xform->key.data,
900 				   auth_xform->key.length);
901 		} else {
902 			if (auth_xform->key.length > SHA256_BLOCK_SIZE)
903 				return -1;
904 			sess->auth.algo = CCP_AUTH_ALGO_SHA256_HMAC;
905 			sess->auth.engine = CCP_ENGINE_SHA;
906 			sess->auth.ut.sha_type = CCP_SHA_TYPE_256;
907 			sess->auth.ctx_len = CCP_SB_BYTES;
908 			sess->auth.offset = CCP_SB_BYTES - SHA256_DIGEST_SIZE;
909 			sess->auth.block_size = SHA256_BLOCK_SIZE;
910 			sess->auth.key_length = auth_xform->key.length;
911 			memset(sess->auth.key, 0, sess->auth.block_size);
912 			memset(sess->auth.pre_compute, 0,
913 			       sess->auth.ctx_len << 1);
914 			rte_memcpy(sess->auth.key, auth_xform->key.data,
915 				   auth_xform->key.length);
916 			if (generate_partial_hash(sess))
917 				return -1;
918 		}
919 		break;
920 	case RTE_CRYPTO_AUTH_SHA3_256:
921 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_256;
922 		sess->auth.engine = CCP_ENGINE_SHA;
923 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
924 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
925 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
926 		break;
927 	case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
928 		if (auth_xform->key.length > SHA3_256_BLOCK_SIZE)
929 			return -1;
930 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_256_HMAC;
931 		sess->auth.engine = CCP_ENGINE_SHA;
932 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_256;
933 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
934 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA256_DIGEST_SIZE;
935 		sess->auth.block_size = SHA3_256_BLOCK_SIZE;
936 		sess->auth.key_length = auth_xform->key.length;
937 		memset(sess->auth.key, 0, sess->auth.block_size);
938 		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
939 		rte_memcpy(sess->auth.key, auth_xform->key.data,
940 			   auth_xform->key.length);
941 		if (generate_partial_hash(sess))
942 			return -1;
943 		break;
944 	case RTE_CRYPTO_AUTH_SHA384:
945 		sess->auth.algo = CCP_AUTH_ALGO_SHA384;
946 		sess->auth.engine = CCP_ENGINE_SHA;
947 		sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
948 		sess->auth.ctx = (void *)ccp_sha384_init;
949 		sess->auth.ctx_len = CCP_SB_BYTES << 1;
950 		sess->auth.offset = (CCP_SB_BYTES << 1) - SHA384_DIGEST_SIZE;
951 		break;
952 	case RTE_CRYPTO_AUTH_SHA384_HMAC:
953 		if (sess->auth_opt) {
954 			if (auth_xform->key.length > SHA384_BLOCK_SIZE)
955 				return -1;
956 			sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
957 			sess->auth.offset = ((CCP_SB_BYTES << 1) -
958 					     SHA384_DIGEST_SIZE);
959 			sess->auth.block_size = SHA384_BLOCK_SIZE;
960 			sess->auth.key_length = auth_xform->key.length;
961 			memset(sess->auth.key, 0, sess->auth.block_size);
962 			rte_memcpy(sess->auth.key, auth_xform->key.data,
963 				   auth_xform->key.length);
964 		} else {
965 			if (auth_xform->key.length > SHA384_BLOCK_SIZE)
966 				return -1;
967 			sess->auth.algo = CCP_AUTH_ALGO_SHA384_HMAC;
968 			sess->auth.engine = CCP_ENGINE_SHA;
969 			sess->auth.ut.sha_type = CCP_SHA_TYPE_384;
970 			sess->auth.ctx_len = CCP_SB_BYTES << 1;
971 			sess->auth.offset = ((CCP_SB_BYTES << 1) -
972 					     SHA384_DIGEST_SIZE);
973 			sess->auth.block_size = SHA384_BLOCK_SIZE;
974 			sess->auth.key_length = auth_xform->key.length;
975 			memset(sess->auth.key, 0, sess->auth.block_size);
976 			memset(sess->auth.pre_compute, 0,
977 			       sess->auth.ctx_len << 1);
978 			rte_memcpy(sess->auth.key, auth_xform->key.data,
979 				   auth_xform->key.length);
980 			if (generate_partial_hash(sess))
981 				return -1;
982 		}
983 		break;
984 	case RTE_CRYPTO_AUTH_SHA3_384:
985 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_384;
986 		sess->auth.engine = CCP_ENGINE_SHA;
987 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
988 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
989 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
990 		break;
991 	case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
992 		if (auth_xform->key.length > SHA3_384_BLOCK_SIZE)
993 			return -1;
994 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_384_HMAC;
995 		sess->auth.engine = CCP_ENGINE_SHA;
996 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_384;
997 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
998 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA384_DIGEST_SIZE;
999 		sess->auth.block_size = SHA3_384_BLOCK_SIZE;
1000 		sess->auth.key_length = auth_xform->key.length;
1001 		memset(sess->auth.key, 0, sess->auth.block_size);
1002 		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1003 		rte_memcpy(sess->auth.key, auth_xform->key.data,
1004 			   auth_xform->key.length);
1005 		if (generate_partial_hash(sess))
1006 			return -1;
1007 		break;
1008 	case RTE_CRYPTO_AUTH_SHA512:
1009 		sess->auth.algo = CCP_AUTH_ALGO_SHA512;
1010 		sess->auth.engine = CCP_ENGINE_SHA;
1011 		sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1012 		sess->auth.ctx = (void *)ccp_sha512_init;
1013 		sess->auth.ctx_len = CCP_SB_BYTES << 1;
1014 		sess->auth.offset = (CCP_SB_BYTES << 1) - SHA512_DIGEST_SIZE;
1015 		break;
1016 	case RTE_CRYPTO_AUTH_SHA512_HMAC:
1017 		if (sess->auth_opt) {
1018 			if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1019 				return -1;
1020 			sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1021 			sess->auth.offset = ((CCP_SB_BYTES << 1) -
1022 					     SHA512_DIGEST_SIZE);
1023 			sess->auth.block_size = SHA512_BLOCK_SIZE;
1024 			sess->auth.key_length = auth_xform->key.length;
1025 			memset(sess->auth.key, 0, sess->auth.block_size);
1026 			rte_memcpy(sess->auth.key, auth_xform->key.data,
1027 				   auth_xform->key.length);
1028 		} else {
1029 			if (auth_xform->key.length > SHA512_BLOCK_SIZE)
1030 				return -1;
1031 			sess->auth.algo = CCP_AUTH_ALGO_SHA512_HMAC;
1032 			sess->auth.engine = CCP_ENGINE_SHA;
1033 			sess->auth.ut.sha_type = CCP_SHA_TYPE_512;
1034 			sess->auth.ctx_len = CCP_SB_BYTES << 1;
1035 			sess->auth.offset = ((CCP_SB_BYTES << 1) -
1036 					     SHA512_DIGEST_SIZE);
1037 			sess->auth.block_size = SHA512_BLOCK_SIZE;
1038 			sess->auth.key_length = auth_xform->key.length;
1039 			memset(sess->auth.key, 0, sess->auth.block_size);
1040 			memset(sess->auth.pre_compute, 0,
1041 			       sess->auth.ctx_len << 1);
1042 			rte_memcpy(sess->auth.key, auth_xform->key.data,
1043 				   auth_xform->key.length);
1044 			if (generate_partial_hash(sess))
1045 				return -1;
1046 		}
1047 		break;
1048 	case RTE_CRYPTO_AUTH_SHA3_512:
1049 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_512;
1050 		sess->auth.engine = CCP_ENGINE_SHA;
1051 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1052 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1053 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1054 		break;
1055 	case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
1056 		if (auth_xform->key.length > SHA3_512_BLOCK_SIZE)
1057 			return -1;
1058 		sess->auth.algo = CCP_AUTH_ALGO_SHA3_512_HMAC;
1059 		sess->auth.engine = CCP_ENGINE_SHA;
1060 		sess->auth.ut.sha_type = CCP_SHA3_TYPE_512;
1061 		sess->auth.ctx_len = CCP_SHA3_CTX_SIZE;
1062 		sess->auth.offset = CCP_SHA3_CTX_SIZE - SHA512_DIGEST_SIZE;
1063 		sess->auth.block_size = SHA3_512_BLOCK_SIZE;
1064 		sess->auth.key_length = auth_xform->key.length;
1065 		memset(sess->auth.key, 0, sess->auth.block_size);
1066 		memset(sess->auth.pre_compute, 0, 2 * sess->auth.ctx_len);
1067 		rte_memcpy(sess->auth.key, auth_xform->key.data,
1068 			   auth_xform->key.length);
1069 		if (generate_partial_hash(sess))
1070 			return -1;
1071 		break;
1072 	case RTE_CRYPTO_AUTH_AES_CMAC:
1073 		sess->auth.algo = CCP_AUTH_ALGO_AES_CMAC;
1074 		sess->auth.engine = CCP_ENGINE_AES;
1075 		sess->auth.um.aes_mode = CCP_AES_MODE_CMAC;
1076 		sess->auth.key_length = auth_xform->key.length;
1077 		/* padding and hash result */
1078 		sess->auth.ctx_len = CCP_SB_BYTES << 1;
1079 		sess->auth.offset = AES_BLOCK_SIZE;
1080 		sess->auth.block_size = AES_BLOCK_SIZE;
1081 		if (sess->auth.key_length == 16)
1082 			sess->auth.ut.aes_type = CCP_AES_TYPE_128;
1083 		else if (sess->auth.key_length == 24)
1084 			sess->auth.ut.aes_type = CCP_AES_TYPE_192;
1085 		else if (sess->auth.key_length == 32)
1086 			sess->auth.ut.aes_type = CCP_AES_TYPE_256;
1087 		else {
1088 			CCP_LOG_ERR("Invalid CMAC key length");
1089 			return -1;
1090 		}
1091 		rte_memcpy(sess->auth.key, auth_xform->key.data,
1092 			   sess->auth.key_length);
1093 		for (i = 0; i < sess->auth.key_length; i++)
1094 			sess->auth.key_ccp[sess->auth.key_length - i - 1] =
1095 				sess->auth.key[i];
1096 		if (generate_cmac_subkeys(sess))
1097 			return -1;
1098 		break;
1099 	default:
1100 		CCP_LOG_ERR("Unsupported hash algo");
1101 		return -ENOTSUP;
1102 	}
1103 	return 0;
1104 }
1105 
1106 static int
1107 ccp_configure_session_aead(struct ccp_session *sess,
1108 			   const struct rte_crypto_sym_xform *xform)
1109 {
1110 	const struct rte_crypto_aead_xform *aead_xform = NULL;
1111 	size_t i;
1112 
1113 	aead_xform = &xform->aead;
1114 
1115 	sess->cipher.key_length = aead_xform->key.length;
1116 	rte_memcpy(sess->cipher.key, aead_xform->key.data,
1117 		   aead_xform->key.length);
1118 
1119 	if (aead_xform->op == RTE_CRYPTO_AEAD_OP_ENCRYPT) {
1120 		sess->cipher.dir = CCP_CIPHER_DIR_ENCRYPT;
1121 		sess->auth.op = CCP_AUTH_OP_GENERATE;
1122 	} else {
1123 		sess->cipher.dir = CCP_CIPHER_DIR_DECRYPT;
1124 		sess->auth.op = CCP_AUTH_OP_VERIFY;
1125 	}
1126 	sess->aead_algo = aead_xform->algo;
1127 	sess->auth.aad_length = aead_xform->aad_length;
1128 	sess->auth.digest_length = aead_xform->digest_length;
1129 
1130 	/* set iv parameters */
1131 	sess->iv.offset = aead_xform->iv.offset;
1132 	sess->iv.length = aead_xform->iv.length;
1133 
1134 	switch (aead_xform->algo) {
1135 	case RTE_CRYPTO_AEAD_AES_GCM:
1136 		sess->cipher.algo = CCP_CIPHER_ALGO_AES_GCM;
1137 		sess->cipher.um.aes_mode = CCP_AES_MODE_GCTR;
1138 		sess->cipher.engine = CCP_ENGINE_AES;
1139 		if (sess->cipher.key_length == 16)
1140 			sess->cipher.ut.aes_type = CCP_AES_TYPE_128;
1141 		else if (sess->cipher.key_length == 24)
1142 			sess->cipher.ut.aes_type = CCP_AES_TYPE_192;
1143 		else if (sess->cipher.key_length == 32)
1144 			sess->cipher.ut.aes_type = CCP_AES_TYPE_256;
1145 		else {
1146 			CCP_LOG_ERR("Invalid aead key length");
1147 			return -1;
1148 		}
1149 		for (i = 0; i < sess->cipher.key_length; i++)
1150 			sess->cipher.key_ccp[sess->cipher.key_length - i - 1] =
1151 				sess->cipher.key[i];
1152 		sess->auth.algo = CCP_AUTH_ALGO_AES_GCM;
1153 		sess->auth.engine = CCP_ENGINE_AES;
1154 		sess->auth.um.aes_mode = CCP_AES_MODE_GHASH;
1155 		sess->auth.ctx_len = CCP_SB_BYTES;
1156 		sess->auth.offset = 0;
1157 		sess->auth.block_size = AES_BLOCK_SIZE;
1158 		sess->cmd_id = CCP_CMD_COMBINED;
1159 		break;
1160 	default:
1161 		CCP_LOG_ERR("Unsupported aead algo");
1162 		return -ENOTSUP;
1163 	}
1164 	sess->cipher.nonce_phys = rte_mem_virt2iova(sess->cipher.nonce);
1165 	sess->cipher.key_phys = rte_mem_virt2iova(sess->cipher.key_ccp);
1166 	return 0;
1167 }
1168 
1169 int
1170 ccp_set_session_parameters(struct ccp_session *sess,
1171 			   const struct rte_crypto_sym_xform *xform,
1172 			   struct ccp_private *internals)
1173 {
1174 	const struct rte_crypto_sym_xform *cipher_xform = NULL;
1175 	const struct rte_crypto_sym_xform *auth_xform = NULL;
1176 	const struct rte_crypto_sym_xform *aead_xform = NULL;
1177 	int ret = 0;
1178 
1179 	sess->auth_opt = internals->auth_opt;
1180 	sess->cmd_id = ccp_get_cmd_id(xform);
1181 
1182 	switch (sess->cmd_id) {
1183 	case CCP_CMD_CIPHER:
1184 		cipher_xform = xform;
1185 		break;
1186 	case CCP_CMD_AUTH:
1187 		auth_xform = xform;
1188 		break;
1189 	case CCP_CMD_CIPHER_HASH:
1190 		cipher_xform = xform;
1191 		auth_xform = xform->next;
1192 		break;
1193 	case CCP_CMD_HASH_CIPHER:
1194 		auth_xform = xform;
1195 		cipher_xform = xform->next;
1196 		break;
1197 	case CCP_CMD_COMBINED:
1198 		aead_xform = xform;
1199 		break;
1200 	default:
1201 		CCP_LOG_ERR("Unsupported cmd_id");
1202 		return -1;
1203 	}
1204 
1205 	/* Default IV length = 0 */
1206 	sess->iv.length = 0;
1207 	if (cipher_xform) {
1208 		ret = ccp_configure_session_cipher(sess, cipher_xform);
1209 		if (ret != 0) {
1210 			CCP_LOG_ERR("Invalid/unsupported cipher parameters");
1211 			return ret;
1212 		}
1213 	}
1214 	if (auth_xform) {
1215 		ret = ccp_configure_session_auth(sess, auth_xform);
1216 		if (ret != 0) {
1217 			CCP_LOG_ERR("Invalid/unsupported auth parameters");
1218 			return ret;
1219 		}
1220 	}
1221 	if (aead_xform) {
1222 		ret = ccp_configure_session_aead(sess, aead_xform);
1223 		if (ret != 0) {
1224 			CCP_LOG_ERR("Invalid/unsupported aead parameters");
1225 			return ret;
1226 		}
1227 	}
1228 	return ret;
1229 }
1230 
1231 /* calculate CCP descriptors requirement */
1232 static inline int
1233 ccp_cipher_slot(struct ccp_session *session)
1234 {
1235 	int count = 0;
1236 
1237 	switch (session->cipher.algo) {
1238 	case CCP_CIPHER_ALGO_AES_CBC:
1239 		count = 2;
1240 		/**< op + passthrough for iv */
1241 		break;
1242 	case CCP_CIPHER_ALGO_AES_ECB:
1243 		count = 1;
1244 		/**<only op*/
1245 		break;
1246 	case CCP_CIPHER_ALGO_AES_CTR:
1247 		count = 2;
1248 		/**< op + passthrough for iv */
1249 		break;
1250 	case CCP_CIPHER_ALGO_3DES_CBC:
1251 		count = 2;
1252 		/**< op + passthrough for iv */
1253 		break;
1254 	default:
1255 		CCP_LOG_ERR("Unsupported cipher algo %d",
1256 			    session->cipher.algo);
1257 	}
1258 	return count;
1259 }
1260 
1261 static inline int
1262 ccp_auth_slot(struct ccp_session *session)
1263 {
1264 	int count = 0;
1265 
1266 	switch (session->auth.algo) {
1267 	case CCP_AUTH_ALGO_SHA1:
1268 	case CCP_AUTH_ALGO_SHA224:
1269 	case CCP_AUTH_ALGO_SHA256:
1270 	case CCP_AUTH_ALGO_SHA384:
1271 	case CCP_AUTH_ALGO_SHA512:
1272 		count = 3;
1273 		/**< op + lsb passthrough cpy to/from*/
1274 		break;
1275 	case CCP_AUTH_ALGO_MD5_HMAC:
1276 		break;
1277 	case CCP_AUTH_ALGO_SHA1_HMAC:
1278 	case CCP_AUTH_ALGO_SHA224_HMAC:
1279 	case CCP_AUTH_ALGO_SHA256_HMAC:
1280 		if (session->auth_opt == 0)
1281 			count = 6;
1282 		break;
1283 	case CCP_AUTH_ALGO_SHA384_HMAC:
1284 	case CCP_AUTH_ALGO_SHA512_HMAC:
1285 		/**
1286 		 * 1. Load PHash1 = H(k ^ ipad); to LSB
1287 		 * 2. generate IHash = H(hash on message with PHash1
1288 		 * as init values);
1289 		 * 3. Retrieve IHash 2 slots for 384/512
1290 		 * 4. Load Phash2 = H(k ^ opad); to LSB
1291 		 * 5. generate FHash = H(hash on Ihash with Phash2
1292 		 * as init value);
1293 		 * 6. Retrieve HMAC output from LSB to host memory
1294 		 */
1295 		if (session->auth_opt == 0)
1296 			count = 7;
1297 		break;
1298 	case CCP_AUTH_ALGO_SHA3_224:
1299 	case CCP_AUTH_ALGO_SHA3_256:
1300 	case CCP_AUTH_ALGO_SHA3_384:
1301 	case CCP_AUTH_ALGO_SHA3_512:
1302 		count = 1;
1303 		/**< only op ctx and dst in host memory*/
1304 		break;
1305 	case CCP_AUTH_ALGO_SHA3_224_HMAC:
1306 	case CCP_AUTH_ALGO_SHA3_256_HMAC:
1307 		count = 3;
1308 		break;
1309 	case CCP_AUTH_ALGO_SHA3_384_HMAC:
1310 	case CCP_AUTH_ALGO_SHA3_512_HMAC:
1311 		count = 4;
1312 		/**
1313 		 * 1. Op to Perform Ihash
1314 		 * 2. Retrieve result from LSB to host memory
1315 		 * 3. Perform final hash
1316 		 */
1317 		break;
1318 	case CCP_AUTH_ALGO_AES_CMAC:
1319 		count = 4;
1320 		/**
1321 		 * op
1322 		 * extra descriptor in padding case
1323 		 * (k1/k2(255:128) with iv(127:0))
1324 		 * Retrieve result
1325 		 */
1326 		break;
1327 	default:
1328 		CCP_LOG_ERR("Unsupported auth algo %d",
1329 			    session->auth.algo);
1330 	}
1331 
1332 	return count;
1333 }
1334 
1335 static int
1336 ccp_aead_slot(struct ccp_session *session)
1337 {
1338 	int count = 0;
1339 
1340 	switch (session->aead_algo) {
1341 	case RTE_CRYPTO_AEAD_AES_GCM:
1342 		break;
1343 	default:
1344 		CCP_LOG_ERR("Unsupported aead algo %d",
1345 			    session->aead_algo);
1346 	}
1347 	switch (session->auth.algo) {
1348 	case CCP_AUTH_ALGO_AES_GCM:
1349 		count = 5;
1350 		/**
1351 		 * 1. Passthru iv
1352 		 * 2. Hash AAD
1353 		 * 3. GCTR
1354 		 * 4. Reload passthru
1355 		 * 5. Hash Final tag
1356 		 */
1357 		break;
1358 	default:
1359 		CCP_LOG_ERR("Unsupported combined auth ALGO %d",
1360 			    session->auth.algo);
1361 	}
1362 	return count;
1363 }
1364 
1365 int
1366 ccp_compute_slot_count(struct ccp_session *session)
1367 {
1368 	int count = 0;
1369 
1370 	switch (session->cmd_id) {
1371 	case CCP_CMD_CIPHER:
1372 		count = ccp_cipher_slot(session);
1373 		break;
1374 	case CCP_CMD_AUTH:
1375 		count = ccp_auth_slot(session);
1376 		break;
1377 	case CCP_CMD_CIPHER_HASH:
1378 	case CCP_CMD_HASH_CIPHER:
1379 		count = ccp_cipher_slot(session);
1380 		count += ccp_auth_slot(session);
1381 		break;
1382 	case CCP_CMD_COMBINED:
1383 		count = ccp_aead_slot(session);
1384 		break;
1385 	default:
1386 		CCP_LOG_ERR("Unsupported cmd_id");
1387 
1388 	}
1389 
1390 	return count;
1391 }
1392 
1393 static uint8_t
1394 algo_select(int sessalgo,
1395 	    const EVP_MD **algo)
1396 {
1397 	int res = 0;
1398 
1399 	switch (sessalgo) {
1400 	case CCP_AUTH_ALGO_MD5_HMAC:
1401 		*algo = EVP_md5();
1402 		break;
1403 	case CCP_AUTH_ALGO_SHA1_HMAC:
1404 		*algo = EVP_sha1();
1405 		break;
1406 	case CCP_AUTH_ALGO_SHA224_HMAC:
1407 		*algo = EVP_sha224();
1408 		break;
1409 	case CCP_AUTH_ALGO_SHA256_HMAC:
1410 		*algo = EVP_sha256();
1411 		break;
1412 	case CCP_AUTH_ALGO_SHA384_HMAC:
1413 		*algo = EVP_sha384();
1414 		break;
1415 	case CCP_AUTH_ALGO_SHA512_HMAC:
1416 		*algo = EVP_sha512();
1417 		break;
1418 	default:
1419 		res = -EINVAL;
1420 		break;
1421 	}
1422 	return res;
1423 }
1424 
1425 static int
1426 process_cpu_auth_hmac(uint8_t *src, uint8_t *dst,
1427 		      __rte_unused uint8_t *iv,
1428 		      EVP_PKEY *pkey,
1429 		      int srclen,
1430 		      EVP_MD_CTX *ctx,
1431 		      const EVP_MD *algo,
1432 		      uint16_t d_len)
1433 {
1434 	size_t dstlen;
1435 	unsigned char temp_dst[64];
1436 
1437 	if (EVP_DigestSignInit(ctx, NULL, algo, NULL, pkey) <= 0)
1438 		goto process_auth_err;
1439 
1440 	if (EVP_DigestSignUpdate(ctx, (char *)src, srclen) <= 0)
1441 		goto process_auth_err;
1442 
1443 	if (EVP_DigestSignFinal(ctx, temp_dst, &dstlen) <= 0)
1444 		goto process_auth_err;
1445 
1446 	memcpy(dst, temp_dst, d_len);
1447 	return 0;
1448 process_auth_err:
1449 	CCP_LOG_ERR("Process cpu auth failed");
1450 	return -EINVAL;
1451 }
1452 
1453 static int cpu_crypto_auth(struct ccp_qp *qp,
1454 			   struct rte_crypto_op *op,
1455 			   struct ccp_session *sess,
1456 			   EVP_MD_CTX *ctx)
1457 {
1458 	uint8_t *src, *dst;
1459 	int srclen, status;
1460 	struct rte_mbuf *mbuf_src, *mbuf_dst;
1461 	const EVP_MD *algo = NULL;
1462 	EVP_PKEY *pkey;
1463 
1464 	algo_select(sess->auth.algo, &algo);
1465 	pkey = EVP_PKEY_new_mac_key(EVP_PKEY_HMAC, NULL, sess->auth.key,
1466 				    sess->auth.key_length);
1467 	mbuf_src = op->sym->m_src;
1468 	mbuf_dst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
1469 	srclen = op->sym->auth.data.length;
1470 	src = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
1471 				      op->sym->auth.data.offset);
1472 
1473 	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1474 		dst = qp->temp_digest;
1475 	} else {
1476 		dst = op->sym->auth.digest.data;
1477 		if (dst == NULL) {
1478 			dst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
1479 						     op->sym->auth.data.offset +
1480 						     sess->auth.digest_length);
1481 		}
1482 	}
1483 	status = process_cpu_auth_hmac(src, dst, NULL,
1484 				       pkey, srclen,
1485 				       ctx,
1486 				       algo,
1487 				       sess->auth.digest_length);
1488 	if (status) {
1489 		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1490 		return status;
1491 	}
1492 
1493 	if (sess->auth.op == CCP_AUTH_OP_VERIFY) {
1494 		if (memcmp(dst, op->sym->auth.digest.data,
1495 			   sess->auth.digest_length) != 0) {
1496 			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
1497 		} else {
1498 			op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1499 		}
1500 	} else {
1501 		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
1502 	}
1503 	EVP_PKEY_free(pkey);
1504 	return 0;
1505 }
1506 
1507 static void
1508 ccp_perform_passthru(struct ccp_passthru *pst,
1509 		     struct ccp_queue *cmd_q)
1510 {
1511 	struct ccp_desc *desc;
1512 	union ccp_function function;
1513 
1514 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
1515 
1516 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_PASSTHRU;
1517 
1518 	CCP_CMD_SOC(desc) = 0;
1519 	CCP_CMD_IOC(desc) = 0;
1520 	CCP_CMD_INIT(desc) = 0;
1521 	CCP_CMD_EOM(desc) = 0;
1522 	CCP_CMD_PROT(desc) = 0;
1523 
1524 	function.raw = 0;
1525 	CCP_PT_BYTESWAP(&function) = pst->byte_swap;
1526 	CCP_PT_BITWISE(&function) = pst->bit_mod;
1527 	CCP_CMD_FUNCTION(desc) = function.raw;
1528 
1529 	CCP_CMD_LEN(desc) = pst->len;
1530 
1531 	if (pst->dir) {
1532 		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1533 		CCP_CMD_SRC_HI(desc) = high32_value(pst->src_addr);
1534 		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1535 
1536 		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1537 		CCP_CMD_DST_HI(desc) = 0;
1538 		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;
1539 
1540 		if (pst->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
1541 			CCP_CMD_LSB_ID(desc) = cmd_q->sb_key;
1542 	} else {
1543 
1544 		CCP_CMD_SRC_LO(desc) = (uint32_t)(pst->src_addr);
1545 		CCP_CMD_SRC_HI(desc) = 0;
1546 		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SB;
1547 
1548 		CCP_CMD_DST_LO(desc) = (uint32_t)(pst->dest_addr);
1549 		CCP_CMD_DST_HI(desc) = high32_value(pst->dest_addr);
1550 		CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1551 	}
1552 
1553 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1554 }
1555 
1556 static int
1557 ccp_perform_hmac(struct rte_crypto_op *op,
1558 		 struct ccp_queue *cmd_q)
1559 {
1560 
1561 	struct ccp_session *session;
1562 	union ccp_function function;
1563 	struct ccp_desc *desc;
1564 	uint32_t tail;
1565 	phys_addr_t src_addr, dest_addr, dest_addr_t;
1566 	struct ccp_passthru pst;
1567 	uint64_t auth_msg_bits;
1568 	void *append_ptr;
1569 	uint8_t *addr;
1570 
1571 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
1572 	addr = session->auth.pre_compute;
1573 
1574 	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1575 					      op->sym->auth.data.offset);
1576 	append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1577 						session->auth.ctx_len);
1578 	dest_addr_t = dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1579 	pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1580 
1581 	/** Load PHash1 to LSB*/
1582 	pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1583 	pst.len = session->auth.ctx_len;
1584 	pst.dir = 1;
1585 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1586 	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1587 	ccp_perform_passthru(&pst, cmd_q);
1588 
1589 	/**sha engine command descriptor for IntermediateHash*/
1590 
1591 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
1592 	memset(desc, 0, Q_DESC_SIZE);
1593 
1594 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1595 
1596 	CCP_CMD_SOC(desc) = 0;
1597 	CCP_CMD_IOC(desc) = 0;
1598 	CCP_CMD_INIT(desc) = 1;
1599 	CCP_CMD_EOM(desc) = 1;
1600 	CCP_CMD_PROT(desc) = 0;
1601 
1602 	function.raw = 0;
1603 	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1604 	CCP_CMD_FUNCTION(desc) = function.raw;
1605 
1606 	CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1607 	auth_msg_bits = (op->sym->auth.data.length +
1608 			 session->auth.block_size)  * 8;
1609 
1610 	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1611 	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1612 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1613 
1614 	CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1615 	CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1616 	CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1617 
1618 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1619 
1620 	rte_wmb();
1621 
1622 	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1623 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1624 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1625 		      cmd_q->qcontrol | CMD_Q_RUN);
1626 
1627 	/* Intermediate Hash value retrieve */
1628 	if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1629 	    (session->auth.ut.sha_type == CCP_SHA_TYPE_512)) {
1630 
1631 		pst.src_addr =
1632 			(phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
1633 		pst.dest_addr = dest_addr_t;
1634 		pst.len = CCP_SB_BYTES;
1635 		pst.dir = 0;
1636 		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1637 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1638 		ccp_perform_passthru(&pst, cmd_q);
1639 
1640 		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1641 		pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
1642 		pst.len = CCP_SB_BYTES;
1643 		pst.dir = 0;
1644 		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1645 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1646 		ccp_perform_passthru(&pst, cmd_q);
1647 
1648 	} else {
1649 		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1650 		pst.dest_addr = dest_addr_t;
1651 		pst.len = session->auth.ctx_len;
1652 		pst.dir = 0;
1653 		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1654 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1655 		ccp_perform_passthru(&pst, cmd_q);
1656 
1657 	}
1658 
1659 	/** Load PHash2 to LSB*/
1660 	addr += session->auth.ctx_len;
1661 	pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)addr);
1662 	pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1663 	pst.len = session->auth.ctx_len;
1664 	pst.dir = 1;
1665 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1666 	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1667 	ccp_perform_passthru(&pst, cmd_q);
1668 
1669 	/**sha engine command descriptor for FinalHash*/
1670 	dest_addr_t += session->auth.offset;
1671 
1672 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
1673 	memset(desc, 0, Q_DESC_SIZE);
1674 
1675 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1676 
1677 	CCP_CMD_SOC(desc) = 0;
1678 	CCP_CMD_IOC(desc) = 0;
1679 	CCP_CMD_INIT(desc) = 1;
1680 	CCP_CMD_EOM(desc) = 1;
1681 	CCP_CMD_PROT(desc) = 0;
1682 
1683 	function.raw = 0;
1684 	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1685 	CCP_CMD_FUNCTION(desc) = function.raw;
1686 
1687 	CCP_CMD_LEN(desc) = (session->auth.ctx_len -
1688 			     session->auth.offset);
1689 	auth_msg_bits = (session->auth.block_size +
1690 			 session->auth.ctx_len -
1691 			 session->auth.offset) * 8;
1692 
1693 	CCP_CMD_SRC_LO(desc) = (uint32_t)(dest_addr_t);
1694 	CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
1695 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1696 
1697 	CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1698 	CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1699 	CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1700 
1701 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1702 
1703 	rte_wmb();
1704 
1705 	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1706 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1707 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1708 		      cmd_q->qcontrol | CMD_Q_RUN);
1709 
1710 	/* Retrieve hmac output */
1711 	pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1712 	pst.dest_addr = dest_addr;
1713 	pst.len = session->auth.ctx_len;
1714 	pst.dir = 0;
1715 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1716 	if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1717 	    (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1718 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1719 	else
1720 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1721 	ccp_perform_passthru(&pst, cmd_q);
1722 
1723 	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1724 	return 0;
1725 
1726 }
1727 
1728 static int
1729 ccp_perform_sha(struct rte_crypto_op *op,
1730 		struct ccp_queue *cmd_q)
1731 {
1732 	struct ccp_session *session;
1733 	union ccp_function function;
1734 	struct ccp_desc *desc;
1735 	uint32_t tail;
1736 	phys_addr_t src_addr, dest_addr;
1737 	struct ccp_passthru pst;
1738 	void *append_ptr;
1739 	uint64_t auth_msg_bits;
1740 
1741 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
1742 
1743 	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1744 					      op->sym->auth.data.offset);
1745 	append_ptr = (void *)rte_pktmbuf_append(op->sym->m_src,
1746 						session->auth.ctx_len);
1747 	pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)session->auth.ctx);
1748 	dest_addr = (phys_addr_t)rte_mem_virt2iova(append_ptr);
1749 
1750 	/** Passthru sha context*/
1751 
1752 	pst.dest_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1753 	pst.len = session->auth.ctx_len;
1754 	pst.dir = 1;
1755 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1756 	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1757 	ccp_perform_passthru(&pst, cmd_q);
1758 
1759 	/**prepare sha command descriptor*/
1760 
1761 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
1762 	memset(desc, 0, Q_DESC_SIZE);
1763 
1764 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1765 
1766 	CCP_CMD_SOC(desc) = 0;
1767 	CCP_CMD_IOC(desc) = 0;
1768 	CCP_CMD_INIT(desc) = 1;
1769 	CCP_CMD_EOM(desc) = 1;
1770 	CCP_CMD_PROT(desc) = 0;
1771 
1772 	function.raw = 0;
1773 	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1774 	CCP_CMD_FUNCTION(desc) = function.raw;
1775 
1776 	CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1777 	auth_msg_bits = op->sym->auth.data.length * 8;
1778 
1779 	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1780 	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1781 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1782 
1783 	CCP_CMD_LSB_ID(desc) = cmd_q->sb_sha;
1784 	CCP_CMD_SHA_LO(desc) = ((uint32_t)auth_msg_bits);
1785 	CCP_CMD_SHA_HI(desc) = high32_value(auth_msg_bits);
1786 
1787 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
1788 
1789 	rte_wmb();
1790 
1791 	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
1792 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
1793 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
1794 		      cmd_q->qcontrol | CMD_Q_RUN);
1795 
1796 	/* Hash value retrieve */
1797 	pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
1798 	pst.dest_addr = dest_addr;
1799 	pst.len = session->auth.ctx_len;
1800 	pst.dir = 0;
1801 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
1802 	if ((session->auth.ut.sha_type == CCP_SHA_TYPE_384) ||
1803 	    (session->auth.ut.sha_type == CCP_SHA_TYPE_512))
1804 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
1805 	else
1806 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
1807 	ccp_perform_passthru(&pst, cmd_q);
1808 
1809 	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1810 	return 0;
1811 
1812 }
1813 
/*
 * Build the CCP descriptor sequence for a SHA3 HMAC op:
 *   1. SHA engine op over the message, keyed with the precomputed
 *      inner pad state (Ihash) — result lands in the LSB
 *   2. passthrough copies of the intermediate hash back to host scratch
 *   3. a second SHA engine op over the intermediate hash, keyed with
 *      the precomputed outer pad state, writing the final HMAC straight
 *      to host memory.
 * Scratch space is appended to the source mbuf.  Returns 0 on success,
 * -1 when the mbuf has no tailroom.
 */
static int
ccp_perform_sha3_hmac(struct rte_crypto_op *op,
		      struct ccp_queue *cmd_q)
{
	struct ccp_session *session;
	struct ccp_passthru pst;
	union ccp_function function;
	struct ccp_desc *desc;
	uint8_t *append_ptr;
	uint32_t tail;
	phys_addr_t src_addr, dest_addr, ctx_paddr, dest_addr_t;

	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					      op->sym->auth.data.offset);
	/* scratch area for the intermediate and final hashes */
	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
						session->auth.ctx_len);
	if (!append_ptr) {
		CCP_LOG_ERR("CCP MBUF append failed\n");
		return -1;
	}
	dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
	/* pre_compute holds the ipad state, followed by the opad state */
	ctx_paddr = (phys_addr_t)rte_mem_virt2iova(session->auth.pre_compute);
	/* intermediate hash goes into the second half of the scratch area */
	dest_addr_t = dest_addr + (session->auth.ctx_len / 2);
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	/*desc1 for SHA3-Ihash operation */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;
	CCP_CMD_LEN(desc) = op->sym->auth.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* intermediate result is written into the LSB slot */
	CCP_CMD_DST_LO(desc) = (cmd_q->sb_sha * CCP_SB_BYTES);
	CCP_CMD_DST_HI(desc) = 0;
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SB;

	/* key field carries the ipad pre-compute context */
	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	/* ring the doorbell: move tail and re-assert RUN */
	rte_wmb();
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	/* Intermediate Hash value retrieve */
	if ((session->auth.ut.sha_type == CCP_SHA3_TYPE_384) ||
	    (session->auth.ut.sha_type == CCP_SHA3_TYPE_512)) {
		/* 384/512 digests span two LSB slots; copy both halves */
		pst.src_addr =
			(phys_addr_t)((cmd_q->sb_sha + 1) * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t + CCP_SB_BYTES;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);

	} else {
		pst.src_addr = (phys_addr_t)(cmd_q->sb_sha * CCP_SB_BYTES);
		pst.dest_addr = dest_addr_t;
		pst.len = CCP_SB_BYTES;
		pst.dir = 0;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	/**sha engine command descriptor for FinalHash*/
	/* advance to the opad pre-compute context */
	ctx_paddr += CCP_SHA3_CTX_SIZE;
	desc = &cmd_q->qbase_desc[cmd_q->qidx];
	memset(desc, 0, Q_DESC_SIZE);

	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	function.raw = 0;
	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	/* skip the LSB padding in front of digests shorter than a slot */
	if (session->auth.ut.sha_type == CCP_SHA3_TYPE_224) {
		dest_addr_t += (CCP_SB_BYTES - SHA224_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA224_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_256) {
		CCP_CMD_LEN(desc) = SHA256_DIGEST_SIZE;
	} else if (session->auth.ut.sha_type == CCP_SHA3_TYPE_384) {
		dest_addr_t += (2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE);
		CCP_CMD_LEN(desc) = SHA384_DIGEST_SIZE;
	} else {
		CCP_CMD_LEN(desc) = SHA512_DIGEST_SIZE;
	}

	/* source is the intermediate hash in host scratch space */
	CCP_CMD_SRC_LO(desc) = ((uint32_t)dest_addr_t);
	CCP_CMD_SRC_HI(desc) = high32_value(dest_addr_t);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* final HMAC is written directly to host memory */
	CCP_CMD_DST_LO(desc) = (uint32_t)dest_addr;
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
1951 
1952 static int
1953 ccp_perform_sha3(struct rte_crypto_op *op,
1954 		 struct ccp_queue *cmd_q)
1955 {
1956 	struct ccp_session *session;
1957 	union ccp_function function;
1958 	struct ccp_desc *desc;
1959 	uint8_t *ctx_addr = NULL, *append_ptr = NULL;
1960 	uint32_t tail;
1961 	phys_addr_t src_addr, dest_addr, ctx_paddr;
1962 
1963 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
1964 
1965 	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
1966 					      op->sym->auth.data.offset);
1967 	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
1968 						session->auth.ctx_len);
1969 	if (!append_ptr) {
1970 		CCP_LOG_ERR("CCP MBUF append failed\n");
1971 		return -1;
1972 	}
1973 	dest_addr = (phys_addr_t)rte_mem_virt2iova((void *)append_ptr);
1974 	ctx_paddr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
1975 
1976 	ctx_addr = session->auth.sha3_ctx;
1977 
1978 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
1979 	memset(desc, 0, Q_DESC_SIZE);
1980 
1981 	/* prepare desc for SHA3 operation */
1982 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_SHA;
1983 	CCP_CMD_INIT(desc) = 1;
1984 	CCP_CMD_EOM(desc) = 1;
1985 
1986 	function.raw = 0;
1987 	CCP_SHA_TYPE(&function) = session->auth.ut.sha_type;
1988 	CCP_CMD_FUNCTION(desc) = function.raw;
1989 
1990 	CCP_CMD_LEN(desc) = op->sym->auth.data.length;
1991 
1992 	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
1993 	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
1994 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1995 
1996 	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
1997 	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
1998 	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;
1999 
2000 	CCP_CMD_KEY_LO(desc) = ((uint32_t)ctx_paddr);
2001 	CCP_CMD_KEY_HI(desc) = high32_value(ctx_paddr);
2002 	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2003 
2004 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2005 
2006 	rte_wmb();
2007 
2008 	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2009 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2010 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2011 		      cmd_q->qcontrol | CMD_Q_RUN);
2012 
2013 	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2014 	return 0;
2015 }
2016 
2017 static int
2018 ccp_perform_aes_cmac(struct rte_crypto_op *op,
2019 		     struct ccp_queue *cmd_q)
2020 {
2021 	struct ccp_session *session;
2022 	union ccp_function function;
2023 	struct ccp_passthru pst;
2024 	struct ccp_desc *desc;
2025 	uint32_t tail;
2026 	uint8_t *src_tb, *append_ptr, *ctx_addr;
2027 	phys_addr_t src_addr, dest_addr, key_addr;
2028 	int length, non_align_len;
2029 
2030 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2031 	key_addr = rte_mem_virt2phy(session->auth.key_ccp);
2032 
2033 	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2034 					      op->sym->auth.data.offset);
2035 	append_ptr = (uint8_t *)rte_pktmbuf_append(op->sym->m_src,
2036 						session->auth.ctx_len);
2037 	dest_addr = (phys_addr_t)rte_mem_virt2phy((void *)append_ptr);
2038 
2039 	function.raw = 0;
2040 	CCP_AES_ENCRYPT(&function) = CCP_CIPHER_DIR_ENCRYPT;
2041 	CCP_AES_MODE(&function) = session->auth.um.aes_mode;
2042 	CCP_AES_TYPE(&function) = session->auth.ut.aes_type;
2043 
2044 	if (op->sym->auth.data.length % session->auth.block_size == 0) {
2045 
2046 		ctx_addr = session->auth.pre_compute;
2047 		memset(ctx_addr, 0, AES_BLOCK_SIZE);
2048 		pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
2049 		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2050 		pst.len = CCP_SB_BYTES;
2051 		pst.dir = 1;
2052 		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2053 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2054 		ccp_perform_passthru(&pst, cmd_q);
2055 
2056 		desc = &cmd_q->qbase_desc[cmd_q->qidx];
2057 		memset(desc, 0, Q_DESC_SIZE);
2058 
2059 		/* prepare desc for aes-cmac command */
2060 		CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2061 		CCP_CMD_EOM(desc) = 1;
2062 		CCP_CMD_FUNCTION(desc) = function.raw;
2063 
2064 		CCP_CMD_LEN(desc) = op->sym->auth.data.length;
2065 		CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2066 		CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2067 		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2068 
2069 		CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2070 		CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2071 		CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2072 		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2073 
2074 		cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2075 
2076 		rte_wmb();
2077 
2078 		tail =
2079 		(uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2080 		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2081 		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2082 			      cmd_q->qcontrol | CMD_Q_RUN);
2083 	} else {
2084 		ctx_addr = session->auth.pre_compute + CCP_SB_BYTES;
2085 		memset(ctx_addr, 0, AES_BLOCK_SIZE);
2086 		pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *)ctx_addr);
2087 		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2088 		pst.len = CCP_SB_BYTES;
2089 		pst.dir = 1;
2090 		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2091 		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2092 		ccp_perform_passthru(&pst, cmd_q);
2093 
2094 		length = (op->sym->auth.data.length / AES_BLOCK_SIZE);
2095 		length *= AES_BLOCK_SIZE;
2096 		non_align_len = op->sym->auth.data.length - length;
2097 		/* prepare desc for aes-cmac command */
2098 		/*Command 1*/
2099 		desc = &cmd_q->qbase_desc[cmd_q->qidx];
2100 		memset(desc, 0, Q_DESC_SIZE);
2101 
2102 		CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2103 		CCP_CMD_INIT(desc) = 1;
2104 		CCP_CMD_FUNCTION(desc) = function.raw;
2105 
2106 		CCP_CMD_LEN(desc) = length;
2107 		CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2108 		CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2109 		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2110 
2111 		CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2112 		CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2113 		CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2114 		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2115 
2116 		cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2117 
2118 		/*Command 2*/
2119 		append_ptr = append_ptr + CCP_SB_BYTES;
2120 		memset(append_ptr, 0, AES_BLOCK_SIZE);
2121 		src_tb = rte_pktmbuf_mtod_offset(op->sym->m_src,
2122 						 uint8_t *,
2123 						 op->sym->auth.data.offset +
2124 						 length);
2125 		rte_memcpy(append_ptr, src_tb, non_align_len);
2126 		append_ptr[non_align_len] = CMAC_PAD_VALUE;
2127 
2128 		desc = &cmd_q->qbase_desc[cmd_q->qidx];
2129 		memset(desc, 0, Q_DESC_SIZE);
2130 
2131 		CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2132 		CCP_CMD_EOM(desc) = 1;
2133 		CCP_CMD_FUNCTION(desc) = function.raw;
2134 		CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2135 
2136 		CCP_CMD_SRC_LO(desc) = ((uint32_t)(dest_addr + CCP_SB_BYTES));
2137 		CCP_CMD_SRC_HI(desc) = high32_value(dest_addr + CCP_SB_BYTES);
2138 		CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2139 
2140 		CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2141 		CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2142 		CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2143 		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2144 
2145 		cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2146 
2147 		rte_wmb();
2148 		tail =
2149 		(uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2150 		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2151 		CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2152 			      cmd_q->qcontrol | CMD_Q_RUN);
2153 	}
2154 	/* Retrieve result */
2155 	pst.dest_addr = dest_addr;
2156 	pst.src_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2157 	pst.len = CCP_SB_BYTES;
2158 	pst.dir = 0;
2159 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2160 	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
2161 	ccp_perform_passthru(&pst, cmd_q);
2162 
2163 	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2164 	return 0;
2165 }
2166 
/*
 * Build CCP descriptors for an AES cipher operation (modes taken from the
 * session: ECB needs no IV; CTR and the remaining non-ECB modes first stage
 * an IV into the queue's LSB IV slot via a passthru descriptor).
 *
 * Note: this function only enqueues descriptors and advances qidx; the
 * tail-register doorbell is NOT written here — the caller is expected to
 * publish the tail (see process_ops_to_enqueue()).
 *
 * Returns 0; op->status is set to NOT_PROCESSED until dequeue time.
 */
static int
ccp_perform_aes(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	uint8_t *lsb_buf;
	struct ccp_passthru pst = {0};
	struct ccp_desc *desc;
	phys_addr_t src_addr, dest_addr, key_addr;
	uint8_t *iv;

	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
	function.raw = 0;

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB) {
		if (session->cipher.um.aes_mode == CCP_AES_MODE_CTR) {
			/* CTR: IV follows the nonce in the session buffer;
			 * 0x1F selects full 32-byte counter handling
			 * (engine "size" field) — see CCP_AES_SIZE usage.
			 */
			rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE,
				   iv, session->iv.length);
			pst.src_addr = (phys_addr_t)session->cipher.nonce_phys;
			CCP_AES_SIZE(&function) = 0x1F;
		} else {
			/* CBC-like modes: right-align the IV in a per-batch
			 * LSB staging slot and consume one slot per op.
			 */
			lsb_buf =
			&(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
			rte_memcpy(lsb_buf +
				   (CCP_SB_BYTES - session->iv.length),
				   iv, session->iv.length);
			pst.src_addr = b_info->lsb_buf_phys +
				(b_info->lsb_buf_idx * CCP_SB_BYTES);
			b_info->lsb_buf_idx++;
		}

		/* Passthru copies the staged IV into the queue's LSB slot. */
		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
	}

	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					      op->sym->cipher.data.offset);
	/* Out-of-place when m_dst is provided, otherwise in-place. */
	if (likely(op->sym->m_dst != NULL))
		dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
						op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;
	key_addr = session->cipher.key_phys;

	/* prepare desc for aes command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;

	CCP_AES_ENCRYPT(&function) = session->cipher.dir;
	CCP_AES_MODE(&function) = session->cipher.um.aes_mode;
	CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* Non-ECB modes reference the IV staged in the LSB slot above. */
	if (session->cipher.um.aes_mode != CCP_AES_MODE_ECB)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
2251 
/*
 * Build and submit a CCP descriptor for a 3DES cipher operation.
 * Only CBC mode is supported: the IV is right-aligned into a per-batch
 * LSB staging buffer and copied to the queue's IV slot via passthru;
 * CFB/ECB return -ENOTSUP. Unlike ccp_perform_aes(), this function DOES
 * ring the doorbell (tail register write) itself.
 *
 * Returns 0 on success, -ENOTSUP for unsupported DES modes.
 */
static int
ccp_perform_3des(struct rte_crypto_op *op,
		struct ccp_queue *cmd_q,
		struct ccp_batch_info *b_info)
{
	struct ccp_session *session;
	union ccp_function function;
	unsigned char *lsb_buf;
	struct ccp_passthru pst;
	struct ccp_desc *desc;
	uint32_t tail;
	uint8_t *iv;
	phys_addr_t src_addr, dest_addr, key_addr;

	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);

	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
	switch (session->cipher.um.des_mode) {
	case CCP_DES_MODE_CBC:
		/* Claim one LSB staging slot for this op's IV. */
		lsb_buf = &(b_info->lsb_buf[b_info->lsb_buf_idx*CCP_SB_BYTES]);
		b_info->lsb_buf_idx++;

		/* Right-align the IV within the 32-byte slot. */
		rte_memcpy(lsb_buf + (CCP_SB_BYTES - session->iv.length),
			   iv, session->iv.length);
		pst.src_addr = (phys_addr_t)rte_mem_virt2iova((void *) lsb_buf);
		pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
		pst.len = CCP_SB_BYTES;
		pst.dir = 1;
		pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
		pst.byte_swap = CCP_PASSTHRU_BYTESWAP_256BIT;
		ccp_perform_passthru(&pst, cmd_q);
		break;
	case CCP_DES_MODE_CFB:
	case CCP_DES_MODE_ECB:
		CCP_LOG_ERR("Unsupported DES cipher mode");
		return -ENOTSUP;
	}

	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
					      op->sym->cipher.data.offset);
	/* Out-of-place when m_dst is provided, otherwise in-place. */
	if (unlikely(op->sym->m_dst != NULL))
		dest_addr =
			rte_pktmbuf_iova_offset(op->sym->m_dst,
						   op->sym->cipher.data.offset);
	else
		dest_addr = src_addr;

	key_addr = rte_mem_virt2iova(session->cipher.key_ccp);
	desc = &cmd_q->qbase_desc[cmd_q->qidx];

	memset(desc, 0, Q_DESC_SIZE);

	/* prepare desc for des command */
	CCP_CMD_ENGINE(desc) = CCP_ENGINE_3DES;

	CCP_CMD_SOC(desc) = 0;
	CCP_CMD_IOC(desc) = 0;
	CCP_CMD_INIT(desc) = 1;
	CCP_CMD_EOM(desc) = 1;
	CCP_CMD_PROT(desc) = 0;

	function.raw = 0;
	CCP_DES_ENCRYPT(&function) = session->cipher.dir;
	CCP_DES_MODE(&function) = session->cipher.um.des_mode;
	CCP_DES_TYPE(&function) = session->cipher.ut.des_type;
	CCP_CMD_FUNCTION(desc) = function.raw;

	CCP_CMD_LEN(desc) = op->sym->cipher.data.length;

	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
	CCP_CMD_DST_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;

	/* Non-ECB (i.e. CBC here — nonzero mode value) needs the IV slot. */
	if (session->cipher.um.des_mode)
		CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;

	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;

	rte_wmb();

	/* Write the new tail address back to the queue register */
	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
	/* Turn the queue back on using our cached control register */
	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
		      cmd_q->qcontrol | CMD_Q_RUN);

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	return 0;
}
2350 
2351 static int
2352 ccp_perform_aes_gcm(struct rte_crypto_op *op, struct ccp_queue *cmd_q)
2353 {
2354 	struct ccp_session *session;
2355 	union ccp_function function;
2356 	uint8_t *iv;
2357 	struct ccp_passthru pst;
2358 	struct ccp_desc *desc;
2359 	uint32_t tail;
2360 	uint64_t *temp;
2361 	phys_addr_t src_addr, dest_addr, key_addr, aad_addr;
2362 	phys_addr_t digest_dest_addr;
2363 	int length, non_align_len;
2364 
2365 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2366 	iv = rte_crypto_op_ctod_offset(op, uint8_t *, session->iv.offset);
2367 	key_addr = session->cipher.key_phys;
2368 
2369 	src_addr = rte_pktmbuf_iova_offset(op->sym->m_src,
2370 					      op->sym->aead.data.offset);
2371 	if (unlikely(op->sym->m_dst != NULL))
2372 		dest_addr = rte_pktmbuf_iova_offset(op->sym->m_dst,
2373 						op->sym->aead.data.offset);
2374 	else
2375 		dest_addr = src_addr;
2376 	rte_pktmbuf_append(op->sym->m_src, session->auth.ctx_len);
2377 	digest_dest_addr = op->sym->aead.digest.phys_addr;
2378 	temp = (uint64_t *)(op->sym->aead.digest.data + AES_BLOCK_SIZE);
2379 	*temp++ = rte_bswap64(session->auth.aad_length << 3);
2380 	*temp = rte_bswap64(op->sym->aead.data.length << 3);
2381 
2382 	non_align_len = op->sym->aead.data.length % AES_BLOCK_SIZE;
2383 	length = CCP_ALIGN(op->sym->aead.data.length, AES_BLOCK_SIZE);
2384 
2385 	aad_addr = op->sym->aead.aad.phys_addr;
2386 
2387 	/* CMD1 IV Passthru */
2388 	rte_memcpy(session->cipher.nonce + AES_BLOCK_SIZE, iv,
2389 		   session->iv.length);
2390 	pst.src_addr = session->cipher.nonce_phys;
2391 	pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2392 	pst.len = CCP_SB_BYTES;
2393 	pst.dir = 1;
2394 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2395 	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2396 	ccp_perform_passthru(&pst, cmd_q);
2397 
2398 	/* CMD2 GHASH-AAD */
2399 	function.raw = 0;
2400 	CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_AAD;
2401 	CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2402 	CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2403 
2404 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
2405 	memset(desc, 0, Q_DESC_SIZE);
2406 
2407 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2408 	CCP_CMD_INIT(desc) = 1;
2409 	CCP_CMD_FUNCTION(desc) = function.raw;
2410 
2411 	CCP_CMD_LEN(desc) = session->auth.aad_length;
2412 
2413 	CCP_CMD_SRC_LO(desc) = ((uint32_t)aad_addr);
2414 	CCP_CMD_SRC_HI(desc) = high32_value(aad_addr);
2415 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2416 
2417 	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2418 	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2419 	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2420 
2421 	CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2422 
2423 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2424 	rte_wmb();
2425 
2426 	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2427 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2428 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2429 		      cmd_q->qcontrol | CMD_Q_RUN);
2430 
2431 	/* CMD3 : GCTR Plain text */
2432 	function.raw = 0;
2433 	CCP_AES_ENCRYPT(&function) = session->cipher.dir;
2434 	CCP_AES_MODE(&function) = CCP_AES_MODE_GCTR;
2435 	CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2436 	if (non_align_len == 0)
2437 		CCP_AES_SIZE(&function) = (AES_BLOCK_SIZE << 3) - 1;
2438 	else
2439 		CCP_AES_SIZE(&function) = (non_align_len << 3) - 1;
2440 
2441 
2442 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
2443 	memset(desc, 0, Q_DESC_SIZE);
2444 
2445 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2446 	CCP_CMD_EOM(desc) = 1;
2447 	CCP_CMD_FUNCTION(desc) = function.raw;
2448 
2449 	CCP_CMD_LEN(desc) = length;
2450 
2451 	CCP_CMD_SRC_LO(desc) = ((uint32_t)src_addr);
2452 	CCP_CMD_SRC_HI(desc) = high32_value(src_addr);
2453 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2454 
2455 	CCP_CMD_DST_LO(desc) = ((uint32_t)dest_addr);
2456 	CCP_CMD_DST_HI(desc) = high32_value(dest_addr);
2457 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2458 
2459 	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2460 	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2461 	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2462 
2463 	CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2464 
2465 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2466 	rte_wmb();
2467 
2468 	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2469 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2470 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2471 		      cmd_q->qcontrol | CMD_Q_RUN);
2472 
2473 	/* CMD4 : PT to copy IV */
2474 	pst.src_addr = session->cipher.nonce_phys;
2475 	pst.dest_addr = (phys_addr_t)(cmd_q->sb_iv * CCP_SB_BYTES);
2476 	pst.len = AES_BLOCK_SIZE;
2477 	pst.dir = 1;
2478 	pst.bit_mod = CCP_PASSTHRU_BITWISE_NOOP;
2479 	pst.byte_swap = CCP_PASSTHRU_BYTESWAP_NOOP;
2480 	ccp_perform_passthru(&pst, cmd_q);
2481 
2482 	/* CMD5 : GHASH-Final */
2483 	function.raw = 0;
2484 	CCP_AES_ENCRYPT(&function) = CCP_AES_MODE_GHASH_FINAL;
2485 	CCP_AES_MODE(&function) = CCP_AES_MODE_GHASH;
2486 	CCP_AES_TYPE(&function) = session->cipher.ut.aes_type;
2487 
2488 	desc = &cmd_q->qbase_desc[cmd_q->qidx];
2489 	memset(desc, 0, Q_DESC_SIZE);
2490 
2491 	CCP_CMD_ENGINE(desc) = CCP_ENGINE_AES;
2492 	CCP_CMD_FUNCTION(desc) = function.raw;
2493 	/* Last block (AAD_len || PT_len)*/
2494 	CCP_CMD_LEN(desc) = AES_BLOCK_SIZE;
2495 
2496 	CCP_CMD_SRC_LO(desc) = ((uint32_t)digest_dest_addr + AES_BLOCK_SIZE);
2497 	CCP_CMD_SRC_HI(desc) = high32_value(digest_dest_addr + AES_BLOCK_SIZE);
2498 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2499 
2500 	CCP_CMD_DST_LO(desc) = ((uint32_t)digest_dest_addr);
2501 	CCP_CMD_DST_HI(desc) = high32_value(digest_dest_addr);
2502 	CCP_CMD_SRC_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2503 
2504 	CCP_CMD_KEY_LO(desc) = ((uint32_t)key_addr);
2505 	CCP_CMD_KEY_HI(desc) = high32_value(key_addr);
2506 	CCP_CMD_KEY_MEM(desc) = CCP_MEMTYPE_SYSTEM;
2507 
2508 	CCP_CMD_LSB_ID(desc) = cmd_q->sb_iv;
2509 
2510 	cmd_q->qidx = (cmd_q->qidx + 1) % COMMANDS_PER_QUEUE;
2511 	rte_wmb();
2512 
2513 	tail = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx * Q_DESC_SIZE);
2514 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE, tail);
2515 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2516 		      cmd_q->qcontrol | CMD_Q_RUN);
2517 
2518 	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
2519 	return 0;
2520 }
2521 
2522 static inline int
2523 ccp_crypto_cipher(struct rte_crypto_op *op,
2524 		  struct ccp_queue *cmd_q,
2525 		  struct ccp_batch_info *b_info)
2526 {
2527 	int result = 0;
2528 	struct ccp_session *session;
2529 
2530 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2531 
2532 	switch (session->cipher.algo) {
2533 	case CCP_CIPHER_ALGO_AES_CBC:
2534 		result = ccp_perform_aes(op, cmd_q, b_info);
2535 		b_info->desccnt += 2;
2536 		break;
2537 	case CCP_CIPHER_ALGO_AES_CTR:
2538 		result = ccp_perform_aes(op, cmd_q, b_info);
2539 		b_info->desccnt += 2;
2540 		break;
2541 	case CCP_CIPHER_ALGO_AES_ECB:
2542 		result = ccp_perform_aes(op, cmd_q, b_info);
2543 		b_info->desccnt += 1;
2544 		break;
2545 	case CCP_CIPHER_ALGO_3DES_CBC:
2546 		result = ccp_perform_3des(op, cmd_q, b_info);
2547 		b_info->desccnt += 2;
2548 		break;
2549 	default:
2550 		CCP_LOG_ERR("Unsupported cipher algo %d",
2551 			    session->cipher.algo);
2552 		return -ENOTSUP;
2553 	}
2554 	return result;
2555 }
2556 
2557 static inline int
2558 ccp_crypto_auth(struct rte_crypto_op *op,
2559 		struct ccp_queue *cmd_q,
2560 		struct ccp_batch_info *b_info)
2561 {
2562 
2563 	int result = 0;
2564 	struct ccp_session *session;
2565 
2566 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2567 
2568 	switch (session->auth.algo) {
2569 	case CCP_AUTH_ALGO_SHA1:
2570 	case CCP_AUTH_ALGO_SHA224:
2571 	case CCP_AUTH_ALGO_SHA256:
2572 	case CCP_AUTH_ALGO_SHA384:
2573 	case CCP_AUTH_ALGO_SHA512:
2574 		result = ccp_perform_sha(op, cmd_q);
2575 		b_info->desccnt += 3;
2576 		break;
2577 	case CCP_AUTH_ALGO_MD5_HMAC:
2578 		if (session->auth_opt == 0)
2579 			result = -1;
2580 		break;
2581 	case CCP_AUTH_ALGO_SHA1_HMAC:
2582 	case CCP_AUTH_ALGO_SHA224_HMAC:
2583 	case CCP_AUTH_ALGO_SHA256_HMAC:
2584 		if (session->auth_opt == 0) {
2585 			result = ccp_perform_hmac(op, cmd_q);
2586 			b_info->desccnt += 6;
2587 		}
2588 		break;
2589 	case CCP_AUTH_ALGO_SHA384_HMAC:
2590 	case CCP_AUTH_ALGO_SHA512_HMAC:
2591 		if (session->auth_opt == 0) {
2592 			result = ccp_perform_hmac(op, cmd_q);
2593 			b_info->desccnt += 7;
2594 		}
2595 		break;
2596 	case CCP_AUTH_ALGO_SHA3_224:
2597 	case CCP_AUTH_ALGO_SHA3_256:
2598 	case CCP_AUTH_ALGO_SHA3_384:
2599 	case CCP_AUTH_ALGO_SHA3_512:
2600 		result = ccp_perform_sha3(op, cmd_q);
2601 		b_info->desccnt += 1;
2602 		break;
2603 	case CCP_AUTH_ALGO_SHA3_224_HMAC:
2604 	case CCP_AUTH_ALGO_SHA3_256_HMAC:
2605 		result = ccp_perform_sha3_hmac(op, cmd_q);
2606 		b_info->desccnt += 3;
2607 		break;
2608 	case CCP_AUTH_ALGO_SHA3_384_HMAC:
2609 	case CCP_AUTH_ALGO_SHA3_512_HMAC:
2610 		result = ccp_perform_sha3_hmac(op, cmd_q);
2611 		b_info->desccnt += 4;
2612 		break;
2613 	case CCP_AUTH_ALGO_AES_CMAC:
2614 		result = ccp_perform_aes_cmac(op, cmd_q);
2615 		b_info->desccnt += 4;
2616 		break;
2617 	default:
2618 		CCP_LOG_ERR("Unsupported auth algo %d",
2619 			    session->auth.algo);
2620 		return -ENOTSUP;
2621 	}
2622 
2623 	return result;
2624 }
2625 
2626 static inline int
2627 ccp_crypto_aead(struct rte_crypto_op *op,
2628 		struct ccp_queue *cmd_q,
2629 		struct ccp_batch_info *b_info)
2630 {
2631 	int result = 0;
2632 	struct ccp_session *session;
2633 
2634 	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
2635 
2636 	switch (session->auth.algo) {
2637 	case CCP_AUTH_ALGO_AES_GCM:
2638 		if (session->cipher.algo != CCP_CIPHER_ALGO_AES_GCM) {
2639 			CCP_LOG_ERR("Incorrect chain order");
2640 			return -1;
2641 		}
2642 		result = ccp_perform_aes_gcm(op, cmd_q);
2643 		b_info->desccnt += 5;
2644 		break;
2645 	default:
2646 		CCP_LOG_ERR("Unsupported aead algo %d",
2647 			    session->aead_algo);
2648 		return -ENOTSUP;
2649 	}
2650 	return result;
2651 }
2652 
2653 int
2654 process_ops_to_enqueue(struct ccp_qp *qp,
2655 		       struct rte_crypto_op **op,
2656 		       struct ccp_queue *cmd_q,
2657 		       uint16_t nb_ops,
2658 		       uint16_t total_nb_ops,
2659 		       int slots_req,
2660 		       uint16_t b_idx)
2661 {
2662 	int i, result = 0;
2663 	struct ccp_batch_info *b_info;
2664 	struct ccp_session *session;
2665 	EVP_MD_CTX *auth_ctx = NULL;
2666 
2667 	if (rte_mempool_get(qp->batch_mp, (void **)&b_info)) {
2668 		CCP_LOG_ERR("batch info allocation failed");
2669 		return 0;
2670 	}
2671 
2672 	auth_ctx = EVP_MD_CTX_create();
2673 	if (unlikely(!auth_ctx)) {
2674 		CCP_LOG_ERR("Unable to create auth ctx");
2675 		return 0;
2676 	}
2677 	b_info->auth_ctr = 0;
2678 
2679 	/* populate batch info necessary for dequeue */
2680 	b_info->op_idx = 0;
2681 	b_info->b_idx = 0;
2682 	b_info->lsb_buf_idx = 0;
2683 	b_info->desccnt = 0;
2684 	b_info->cmd_q = cmd_q;
2685 	b_info->lsb_buf_phys = (phys_addr_t)rte_mem_virt2iova((void *)b_info->lsb_buf);
2686 
2687 	rte_atomic64_sub(&b_info->cmd_q->free_slots, slots_req);
2688 
2689 	b_info->head_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2690 					 Q_DESC_SIZE);
2691 	for (i = b_idx; i < (nb_ops+b_idx); i++) {
2692 		session = CRYPTODEV_GET_SYM_SESS_PRIV(op[i]->sym->session);
2693 		switch (session->cmd_id) {
2694 		case CCP_CMD_CIPHER:
2695 			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2696 			break;
2697 		case CCP_CMD_AUTH:
2698 			if (session->auth_opt) {
2699 				b_info->auth_ctr++;
2700 				result = cpu_crypto_auth(qp, op[i],
2701 							 session, auth_ctx);
2702 			} else
2703 				result = ccp_crypto_auth(op[i], cmd_q, b_info);
2704 			break;
2705 		case CCP_CMD_CIPHER_HASH:
2706 			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2707 			if (result)
2708 				break;
2709 			result = ccp_crypto_auth(op[i], cmd_q, b_info);
2710 			break;
2711 		case CCP_CMD_HASH_CIPHER:
2712 			if (session->auth_opt) {
2713 				result = cpu_crypto_auth(qp, op[i],
2714 							 session, auth_ctx);
2715 				if (op[i]->status !=
2716 				    RTE_CRYPTO_OP_STATUS_SUCCESS)
2717 					CCP_LOG_ERR("RTE_CRYPTO_OP_STATUS_AUTH_FAILED");
2718 			} else
2719 				result = ccp_crypto_auth(op[i], cmd_q, b_info);
2720 
2721 			if (result)
2722 				break;
2723 			result = ccp_crypto_cipher(op[i], cmd_q, b_info);
2724 			break;
2725 		case CCP_CMD_COMBINED:
2726 			result = ccp_crypto_aead(op[i], cmd_q, b_info);
2727 			break;
2728 		default:
2729 			CCP_LOG_ERR("Unsupported cmd_id");
2730 			result = -1;
2731 		}
2732 		if (unlikely(result < 0)) {
2733 			rte_atomic64_add(&b_info->cmd_q->free_slots,
2734 					 (slots_req - b_info->desccnt));
2735 			break;
2736 		}
2737 		b_info->op[i] = op[i];
2738 	}
2739 
2740 	b_info->opcnt = i;
2741 	b_info->b_idx = b_idx;
2742 	b_info->total_nb_ops = total_nb_ops;
2743 	b_info->tail_offset = (uint32_t)(cmd_q->qbase_phys_addr + cmd_q->qidx *
2744 					 Q_DESC_SIZE);
2745 
2746 	rte_wmb();
2747 	/* Write the new tail address back to the queue register */
2748 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_TAIL_LO_BASE,
2749 			      b_info->tail_offset);
2750 	/* Turn the queue back on using our cached control register */
2751 	CCP_WRITE_REG(cmd_q->reg_base, CMD_Q_CONTROL_BASE,
2752 			      cmd_q->qcontrol | CMD_Q_RUN);
2753 
2754 	rte_ring_enqueue(qp->processed_pkts, (void *)b_info);
2755 
2756 	EVP_MD_CTX_destroy(auth_ctx);
2757 	return i-b_idx;
2758 }
2759 
/*
 * Post-process a completed auth/AEAD op: locate the digest the engine
 * wrote at the end of the source mbuf, byte-swap it on the host when the
 * engine emits it reversed, then either verify it against the supplied
 * digest (VERIFY) or copy it out (GENERATE). Finally trim the scratch
 * context area off the mbuf.
 */
static inline void ccp_auth_dq_prepare(struct rte_crypto_op *op)
{
	struct ccp_session *session;
	uint8_t *digest_data, *addr;
	struct rte_mbuf *m_last;
	int offset, digest_offset;
	uint8_t digest_le[64];

	session = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);

	if (session->cmd_id == CCP_CMD_COMBINED) {
		digest_data = op->sym->aead.digest.data;
		digest_offset = op->sym->aead.data.offset +
					op->sym->aead.data.length;
	} else {
		digest_data = op->sym->auth.digest.data;
		digest_offset = op->sym->auth.data.offset +
					op->sym->auth.data.length;
	}
	/* The engine's output lives in the ctx_len bytes appended to the
	 * last segment at enqueue time.
	 */
	m_last = rte_pktmbuf_lastseg(op->sym->m_src);
	addr = (uint8_t *)((char *)m_last->buf_addr + m_last->data_off +
			   m_last->data_len - session->auth.ctx_len);

	rte_mb();
	offset = session->auth.offset;

	if (session->auth.engine == CCP_ENGINE_SHA)
		if ((session->auth.ut.sha_type != CCP_SHA_TYPE_1) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_224) &&
		    (session->auth.ut.sha_type != CCP_SHA_TYPE_256)) {
			/* All other algorithms require byte
			 * swap done by host
			 */
			unsigned int i;

			/* Read the digest backwards from the end of the
			 * context area into a little-endian copy.
			 */
			offset = session->auth.ctx_len -
				session->auth.offset - 1;
			for (i = 0; i < session->auth.digest_length; i++)
				digest_le[i] = addr[offset - i];
			offset = 0;
			addr = digest_le;
		}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (session->auth.op == CCP_AUTH_OP_VERIFY) {
		if (memcmp(addr + offset, digest_data,
			   session->auth.digest_length) != 0)
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;

	} else {
		/* GENERATE: if no digest buffer was supplied, write the
		 * digest into m_dst just past the authenticated data.
		 */
		if (unlikely(digest_data == 0))
			digest_data = rte_pktmbuf_mtod_offset(
					op->sym->m_dst, uint8_t *,
					digest_offset);
		rte_memcpy(digest_data, addr + offset,
			   session->auth.digest_length);
	}
	/* Trim area used for digest from mbuf. */
	rte_pktmbuf_trim(op->sym->m_src,
			 session->auth.ctx_len);
}
2821 
/*
 * Finalize up to nb_ops completed ops from a batch into the caller's op_d
 * array: set per-op status and, for auth ops processed by the engine,
 * run the digest verify/copy-out path. The batch's op_idx cursor tracks
 * how many ops have already been handed out across calls.
 *
 * Returns the number of ops written to op_d.
 */
static int
ccp_prepare_ops(struct ccp_qp *qp,
		struct rte_crypto_op **op_d,
		struct ccp_batch_info *b_info,
		uint16_t nb_ops)
{
	int i, min_ops;
	struct ccp_session *session;

	EVP_MD_CTX *auth_ctx = NULL;

	auth_ctx = EVP_MD_CTX_create();
	if (unlikely(!auth_ctx)) {
		CCP_LOG_ERR("Unable to create auth ctx");
		return 0;
	}
	/* Hand out no more ops than the caller asked for or the batch holds. */
	min_ops = RTE_MIN(nb_ops, b_info->opcnt);

	for (i =  b_info->b_idx; i < min_ops; i++) {
		/* op_idx advances across calls; b_idx is the batch's base. */
		op_d[i] = b_info->op[b_info->b_idx + b_info->op_idx++];
		session = CRYPTODEV_GET_SYM_SESS_PRIV(op_d[i]->sym->session);
		switch (session->cmd_id) {
		case CCP_CMD_CIPHER:
			op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			break;
		case CCP_CMD_AUTH:
			/* auth_opt ops were already finished on the CPU. */
			if (session->auth_opt == 0)
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_CIPHER_HASH:
			if (session->auth_opt)
				cpu_crypto_auth(qp, op_d[i],
						session, auth_ctx);
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_HASH_CIPHER:
			if (session->auth_opt)
				op_d[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
			else
				ccp_auth_dq_prepare(op_d[i]);
			break;
		case CCP_CMD_COMBINED:
			ccp_auth_dq_prepare(op_d[i]);
			break;
		default:
			CCP_LOG_ERR("Unsupported cmd_id");
		}
	}

	EVP_MD_CTX_destroy(auth_ctx);
	b_info->opcnt -= min_ops;
	return min_ops;
}
2876 
/*
 * Dequeue completed ops for a queue pair. A batch is taken either from
 * qp->b_info (partially-drained batch from a previous call) or from the
 * processed-pkts ring. The hardware head pointer is compared against the
 * batch's recorded head/tail descriptor offsets — handling ring wrap —
 * to decide whether the batch has fully executed; if not, the batch is
 * parked on qp->b_info and 0 is returned.
 *
 * Returns the number of ops placed in op[].
 */
int
process_ops_to_dequeue(struct ccp_qp *qp,
		       struct rte_crypto_op **op,
		       uint16_t nb_ops,
		       uint16_t *total_nb_ops)
{
	struct ccp_batch_info *b_info;
	uint32_t cur_head_offset;

	if (qp->b_info != NULL) {
		b_info = qp->b_info;
		/* Batch already confirmed complete on a previous call. */
		if (unlikely(b_info->op_idx > 0))
			goto success;
	} else if (rte_ring_dequeue(qp->processed_pkts,
				    (void **)&b_info))
		return 0;

	/* All ops were CPU-side auth: nothing to wait on from hardware. */
	if (b_info->auth_ctr == b_info->opcnt)
		goto success;
	*total_nb_ops = b_info->total_nb_ops;
	cur_head_offset = CCP_READ_REG(b_info->cmd_q->reg_base,
				       CMD_Q_HEAD_LO_BASE);

	if (b_info->head_offset < b_info->tail_offset) {
		/* No wrap: still busy while head lies inside [head, tail). */
		if ((cur_head_offset >= b_info->head_offset) &&
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	} else if (b_info->tail_offset != b_info->head_offset) {
		/* Wrapped batch: busy region spans the ring boundary. */
		if ((cur_head_offset >= b_info->head_offset) ||
		    (cur_head_offset < b_info->tail_offset)) {
			qp->b_info = b_info;
			return 0;
		}
	}


success:
	*total_nb_ops = b_info->total_nb_ops;
	nb_ops = ccp_prepare_ops(qp, op, b_info, nb_ops);
	rte_atomic64_add(&b_info->cmd_q->free_slots, b_info->desccnt);
	b_info->desccnt = 0;
	/* Keep a partially-drained batch for the next call. */
	if (b_info->opcnt > 0) {
		qp->b_info = b_info;
	} else {
		rte_mempool_put(qp->batch_mp, (void *)b_info);
		qp->b_info = NULL;
	}

	return nb_ops;
}
2929