/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright (C) 2019 Marvell International Ltd.
 */

#ifndef _CPT_UCODE_ASYM_H_
#define _CPT_UCODE_ASYM_H_

#include <rte_common.h>
#include <rte_crypto_asym.h>
#include <rte_malloc.h>

#include "cpt_common.h"
#include "cpt_hw_types.h"
#include "cpt_mcode_defines.h"

static __rte_always_inline void
cpt_modex_param_normalize(uint8_t **data, size_t *len)
{
	size_t i;

	/* Strip leading NUL bytes */
	for (i = 0; i < *len; i++) {
		if ((*data)[i] != 0)
			break;
	}

	*data += i;
	*len -= i;
}
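
/*
 * Illustrative example: a 4-byte big-endian operand {0x00, 0x00, 0x01, 0x02}
 * normalizes to data advanced by 2 and len = 2, i.e. the operand {0x01, 0x02}.
 * An all-zero operand normalizes to len = 0, which callers treat as invalid.
 */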

static __rte_always_inline int
cpt_fill_modex_params(struct cpt_asym_sess_misc *sess,
		      struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
	size_t exp_len = xform->modex.exponent.length;
	size_t mod_len = xform->modex.modulus.length;
	uint8_t *exp = xform->modex.exponent.data;
	uint8_t *mod = xform->modex.modulus.data;

	cpt_modex_param_normalize(&mod, &mod_len);
	cpt_modex_param_normalize(&exp, &exp_len);

	if (unlikely(exp_len == 0 || mod_len == 0))
		return -EINVAL;

	if (unlikely(exp_len > mod_len)) {
		CPT_LOG_DP_ERR("Exponent length greater than modulus length is not supported");
		return -ENOTSUP;
	}

	/* Allocate a single buffer to hold both modexp params */
	ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
	if (ctx->modulus.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for modex params");
		return -ENOMEM;
	}

	/* Set up modexp modulus and exponent */
	memcpy(ctx->modulus.data, mod, mod_len);
	ctx->exponent.data = ctx->modulus.data + mod_len;
	memcpy(ctx->exponent.data, exp, exp_len);

	ctx->modulus.length = mod_len;
	ctx->exponent.length = exp_len;

	return 0;
}
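
/*
 * Resulting session buffer layout (single rte_malloc allocation):
 *
 *   ctx->modulus.data              ctx->exponent.data
 *   |<-------- mod_len -------->|<-------- exp_len -------->|
 *
 * cpt_modex_prep() relies on this contiguity to copy both params
 * with one memcpy of mod_len + exp_len bytes.
 */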

static __rte_always_inline int
cpt_fill_rsa_params(struct cpt_asym_sess_misc *sess,
		    struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
	struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
	struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
	size_t mod_len = xfrm_rsa->n.length;
	size_t exp_len = xfrm_rsa->e.length;
	uint64_t total_size;
	size_t len = 0;

	/* Make sure key length used is not more than mod_len/2 */
	if (qt.p.data != NULL)
		len = ((mod_len / 2) < qt.p.length) ? 0 : qt.p.length;

	/* Total size required for RSA key params (n, e, (q, dQ, p, dP, qInv)) */
	total_size = mod_len + exp_len + 5 * len;

	/* Allocate a single buffer to hold all RSA keys */
	rsa->n.data = rte_malloc(NULL, total_size, 0);
	if (rsa->n.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for RSA keys");
		return -ENOMEM;
	}

	/* Set up RSA modulus and public exponent */
	memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
	rsa->e.data = rsa->n.data + mod_len;
	memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);

	/* Private key in quintuple format */
	if (len != 0) {
		rsa->qt.q.data = rsa->e.data + exp_len;
		memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
		rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
		memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
		rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
		memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
		rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
		memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
		rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
		memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);

		rsa->qt.q.length = qt.q.length;
		rsa->qt.dQ.length = qt.dQ.length;
		rsa->qt.p.length = qt.p.length;
		rsa->qt.dP.length = qt.dP.length;
		rsa->qt.qInv.length = qt.qInv.length;
	}
	rsa->n.length = mod_len;
	rsa->e.length = exp_len;

	return 0;
}
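
/*
 * Resulting session buffer layout (single rte_malloc allocation):
 *
 *   n (mod_len) | e (exp_len) | q | dQ | p | dP | qInv (5 * len reserved)
 *
 * The CRT quintuple is stored contiguously in this order so that
 * cpt_rsa_crt_prep() can copy all five components with one memcpy.
 */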

static __rte_always_inline int
cpt_fill_ec_params(struct cpt_asym_sess_misc *sess,
		      struct rte_crypto_asym_xform *xform)
{
	struct cpt_asym_ec_ctx *ec = &sess->ec_ctx;

	switch (xform->ec.curve_id) {
	case RTE_CRYPTO_EC_GROUP_SECP192R1:
		ec->curveid = CPT_EC_ID_P192;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP224R1:
		ec->curveid = CPT_EC_ID_P224;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP256R1:
		ec->curveid = CPT_EC_ID_P256;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP384R1:
		ec->curveid = CPT_EC_ID_P384;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP521R1:
		ec->curveid = CPT_EC_ID_P521;
		break;
	default:
		/* Only NIST curves (FIPS 186-4) are supported */
		CPT_LOG_DP_ERR("Unsupported curve");
		return -EINVAL;
	}

	return 0;
}

static __rte_always_inline int
cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
				 struct rte_crypto_asym_xform *xform)
{
	int ret;

	sess->xfrm_type = xform->xform_type;

	switch (xform->xform_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		ret = cpt_fill_rsa_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		ret = cpt_fill_modex_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cpt_fill_ec_params(sess, xform);
		break;
	default:
		CPT_LOG_DP_ERR("Unsupported transform type");
		return -ENOTSUP;
	}
	return ret;
}

static __rte_always_inline void
cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_modex_xform *mod;
	struct rte_crypto_rsa_xform *rsa;

	switch (sess->xfrm_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		rsa = &sess->rsa_ctx;
		rte_free(rsa->n.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		mod = &sess->mod_ctx;
		rte_free(mod->modulus.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		break;
	default:
		CPT_LOG_DP_ERR("Invalid transform type");
		break;
	}
}

static __rte_always_inline void
cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
{
	void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);

	/* Pointer to cpt_res_s, updated by CPT */
	req->completion_addr = (volatile uint64_t *)completion_addr;
	req->comp_baddr = addr.dma_addr +
			  RTE_PTR_DIFF(completion_addr, addr.vaddr);
	*(req->completion_addr) = COMPLETION_CODE_INIT;
}
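
/*
 * Note: RTE_PTR_ALIGN() may move the completion word up to 15 bytes past
 * addr.vaddr so that cpt_res_s sits on a 16-byte boundary; comp_baddr is
 * advanced by the same delta so the virtual and DMA views stay in sync.
 */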

static __rte_always_inline int
cpt_modex_prep(struct asym_op_params *modex_params,
	       struct rte_crypto_modex_xform *mod)
{
	struct cpt_request_info *req = modex_params->req;
	phys_addr_t mphys = modex_params->meta_buf;
	uint32_t exp_len = mod->exponent.length;
	uint32_t mod_len = mod->modulus.length;
	struct rte_crypto_mod_op_param mod_op;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t base_len;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting modex op from params->req->op[1]->asym->modex */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	mod_op = ((struct rte_crypto_op *)*op)->asym->modex;

	base_len = mod_op.base.length;
	if (unlikely(base_len > mod_len)) {
		CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
		(*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -ENOTSUP;
	}

	total_key_len = mod_len + exp_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, mod->modulus.data, total_key_len);
	dptr += total_key_len;
	memcpy(dptr, mod_op.base.data, base_len);
	dptr += base_len;
	dlen = total_key_len + base_len;

	/* Result buffer */
	rlen = mod_len;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.param2 = exp_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr for the microcode to write its completion status */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
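
/*
 * Input buffer layout built above (dlen bytes following the request):
 *
 *   modulus (mod_len) | exponent (exp_len) | base (base_len)
 *
 * The single memcpy of total_key_len bytes works because
 * cpt_fill_modex_params() stored modulus and exponent contiguously; the
 * microcode writes the mod_len-byte result at req->rptr.
 */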

static __rte_always_inline void
cpt_rsa_prep(struct asym_op_params *rsa_params,
	     struct rte_crypto_rsa_xform *rsa,
	     rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t mod_len = rsa->n.length;
	uint32_t exp_len = rsa->e.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = mod_len + exp_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->n.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	if (rsa->padding.type == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for the no-padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
		vq_cmd_w0.s.param2 = exp_len;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
			/* Public key encrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
					((uint16_t)(exp_len) << 1);
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
			/* Public key decrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr for the microcode to write its completion status */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
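
/*
 * Note: with RTE_CRYPTO_RSA_PADDING_NONE the operation degenerates to a
 * plain mod-exp (same minor opcode as cpt_modex_prep()). For the padded
 * paths, param2 encodes the PKCS block type, with the exponent length
 * folded in as (exp_len << 1) on the encrypt side.
 */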

static __rte_always_inline void
cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
		 struct rte_crypto_rsa_xform *rsa,
		 rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	uint32_t qInv_len = rsa->qt.qInv.length;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t dP_len = rsa->qt.dP.length;
	uint32_t dQ_len = rsa->qt.dQ.length;
	uint32_t p_len = rsa->qt.p.length;
	uint32_t q_len = rsa->qt.q.length;
	uint32_t mod_len = rsa->n.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->qt.q.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	if (rsa->padding.type == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp CRT operation for the no-padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
			/* Private key encrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
			/* Private key decrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr for the microcode to write its completion status */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
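
/*
 * Input buffer layout built above (dlen bytes):
 *
 *   q | dQ | p | dP | qInv | input (in_size)
 *
 * The single memcpy of total_key_len bytes from rsa->qt.q.data works
 * because cpt_fill_rsa_params() stored the quintuple contiguously.
 */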

static __rte_always_inline int __rte_hot
cpt_enqueue_rsa_op(struct rte_crypto_op *op,
	       struct asym_op_params *params,
	       struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;

	switch (rsa->op_type) {
	case RTE_CRYPTO_ASYM_OP_VERIFY:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
		break;
	case RTE_CRYPTO_ASYM_OP_ENCRYPT:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_SIGN:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_DECRYPT:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}
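
/*
 * Note the operand choice above: VERIFY feeds the signature through the
 * public-key path (recovering the message), while SIGN feeds the message
 * through the private-key CRT path; DECRYPT likewise takes rsa->cipher.
 */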

static const struct cpt_ec_group ec_grp[CPT_EC_ID_PMAX] = {
	{
		.prime = {
				.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
					 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
					 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
					 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
				.length = 24,
			},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0x99, 0xDE, 0xF8, 0x36, 0x14, 0x6B,
				   0xC9, 0xB1, 0xB4, 0xD2, 0x28, 0x31},
			  .length = 24},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 24},
		.constb = {.data = {0x64, 0x21, 0x05, 0x19, 0xE5, 0x9C,
				    0x80, 0xE7, 0x0F, 0xA7, 0xE9, 0xAB,
				    0x72, 0x24, 0x30, 0x49, 0xFE, 0xB8,
				    0xDE, 0xEC, 0xC1, 0x46, 0xB9, 0xB1},
			   .length = 24},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01},
			  .length = 28},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0x16, 0xA2, 0xE0, 0xB8, 0xF0, 0x3E, 0x13,
				   0xDD, 0x29, 0x45, 0x5C, 0x5C, 0x2A, 0x3D},
			  .length = 28},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE},
			   .length = 28},
		.constb = {.data = {0xB4, 0x05, 0x0A, 0x85, 0x0C, 0x04, 0xB3,
				    0xAB, 0xF5, 0x41, 0x32, 0x56, 0x50, 0x44,
				    0xB0, 0xB7, 0xD7, 0xBF, 0xD8, 0xBA, 0x27,
				    0x0B, 0x39, 0x43, 0x23, 0x55, 0xFF, 0xB4},
			   .length = 28},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				   0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF},
			  .length = 32},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				   0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xBC, 0xE6, 0xFA, 0xAD, 0xA7,
				   0x17, 0x9E, 0x84, 0xF3, 0xB9, 0xCA, 0xC2,
				   0xFC, 0x63, 0x25, 0x51},
			  .length = 32},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				    0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 32},
		.constb = {.data = {0x5A, 0xC6, 0x35, 0xD8, 0xAA, 0x3A, 0x93,
				    0xE7, 0xB3, 0xEB, 0xBD, 0x55, 0x76, 0x98,
				    0x86, 0xBC, 0x65, 0x1D, 0x06, 0xB0, 0xCC,
				    0x53, 0xB0, 0xF6, 0x3B, 0xCE, 0x3C, 0x3E,
				    0x27, 0xD2, 0x60, 0x4B},
			   .length = 32},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
				   0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF},
			  .length = 48},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xC7, 0x63, 0x4D, 0x81,
				   0xF4, 0x37, 0x2D, 0xDF, 0x58, 0x1A, 0x0D,
				   0xB2, 0x48, 0xB0, 0xA7, 0x7A, 0xEC, 0xEC,
				   0x19, 0x6A, 0xCC, 0xC5, 0x29, 0x73},
			  .length = 48},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
				    0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				    0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 48},
		.constb = {.data = {0xB3, 0x31, 0x2F, 0xA7, 0xE2, 0x3E, 0xE7,
				    0xE4, 0x98, 0x8E, 0x05, 0x6B, 0xE3, 0xF8,
				    0x2D, 0x19, 0x18, 0x1D, 0x9C, 0x6E, 0xFE,
				    0x81, 0x41, 0x12, 0x03, 0x14, 0x08, 0x8F,
				    0x50, 0x13, 0x87, 0x5A, 0xC6, 0x56, 0x39,
				    0x8D, 0x8A, 0x2E, 0xD1, 0x9D, 0x2A, 0x85,
				    0xC8, 0xED, 0xD3, 0xEC, 0x2A, 0xEF},
			   .length = 48},
	},
	{.prime = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF},
		   .length = 66},
	 .order = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFA, 0x51, 0x86, 0x87, 0x83, 0xBF, 0x2F,
			    0x96, 0x6B, 0x7F, 0xCC, 0x01, 0x48, 0xF7, 0x09,
			    0xA5, 0xD0, 0x3B, 0xB5, 0xC9, 0xB8, 0x89, 0x9C,
			    0x47, 0xAE, 0xBB, 0x6F, 0xB7, 0x1E, 0x91, 0x38,
			    0x64, 0x09},
		   .length = 66},
	 .consta = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFC},
		    .length = 66},
	 .constb = {.data = {0x00, 0x51, 0x95, 0x3E, 0xB9, 0x61, 0x8E, 0x1C,
			     0x9A, 0x1F, 0x92, 0x9A, 0x21, 0xA0, 0xB6, 0x85,
			     0x40, 0xEE, 0xA2, 0xDA, 0x72, 0x5B, 0x99, 0xB3,
			     0x15, 0xF3, 0xB8, 0xB4, 0x89, 0x91, 0x8E, 0xF1,
			     0x09, 0xE1, 0x56, 0x19, 0x39, 0x51, 0xEC, 0x7E,
			     0x93, 0x7B, 0x16, 0x52, 0xC0, 0xBD, 0x3B, 0xB1,
			     0xBF, 0x07, 0x35, 0x73, 0xDF, 0x88, 0x3D, 0x2C,
			     0x34, 0xF1, 0xEF, 0x45, 0x1F, 0xD4, 0x6B, 0x50,
			     0x3F, 0x00},
		    .length = 66}}};
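
/*
 * ec_grp[] is indexed by the CPT_EC_ID_* values assigned in
 * cpt_fill_ec_params(): P192 (24-byte params), P224 (28), P256 (32),
 * P384 (48) and P521 (66). Each entry carries the curve prime p, the
 * group order n and the curve constants a and b, stored big-endian.
 */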

static __rte_always_inline void
cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		    struct asym_op_params *ecdsa_params,
		    uint64_t fpm_table_iova,
		    struct cpt_asym_sess_misc *sess)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint16_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t pkey_len = sess->ec_ctx.pkey.length;
	uint8_t curveid = sess->ec_ctx.curveid;
	uint16_t p_align, k_align, m_align;
	uint16_t k_len = ecdsa->k.length;
	uint16_t order_len, prime_len;
	uint16_t o_offset, pk_offset;
	vq_cmd_word0_t vq_cmd_w0;
	uint16_t rlen, dlen;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;
	m_align = RTE_ALIGN_CEIL(message_len, 8);

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	k_align = RTE_ALIGN_CEIL(k_len, 8);

	/* Set write offsets for order and private key */
	o_offset = prime_len - order_len;
	pk_offset = prime_len - pkey_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
	 * ROUNDUP8(priv key len, prime len, order len), and the two curve
	 * constants a and b).
	 * Private key and order cannot exceed prime length, so together with
	 * the constants these account for 5 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 5;

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr, ecdsa->k.data, k_len);
	dptr += k_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr + pk_offset, sess->ec_ctx.pkey.data, pkey_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
	dptr += p_align;

	/* 2 * prime length (for sign components r and s) */
	rlen = 2 * p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = (pkey_len << 8) | k_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr for the microcode to write its completion status */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
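
/*
 * Input buffer layout built above, each component rounded up to 8 bytes
 * and shorter-than-prime fields right-justified at their offsets:
 *
 *   fpm_table_iova | k | prime | order | pkey | message | consta | constb
 *
 * The microcode returns the signature as r followed by s, each occupying
 * p_align bytes at req->rptr.
 */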

static __rte_always_inline void
cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		      struct asym_op_params *ecdsa_params,
		      uint64_t fpm_table_iova,
		      struct cpt_asym_sess_misc *sess)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint32_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t qx_len = sess->ec_ctx.q.x.length;
	uint16_t qy_len = sess->ec_ctx.q.y.length;
	uint8_t curveid = sess->ec_ctx.curveid;
	uint16_t o_offset, r_offset, s_offset;
	uint16_t r_len = ecdsa->r.length;
	uint16_t s_len = ecdsa->s.length;
	uint16_t order_len, prime_len;
	uint16_t qx_offset, qy_offset;
	uint16_t p_align, m_align;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint16_t dlen;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;

	m_align = RTE_ALIGN_CEIL(message_len, 8);
	p_align = RTE_ALIGN_CEIL(prime_len, 8);

	/* Set write offsets for sign, order and public key coordinates */
	o_offset = prime_len - order_len;
	qx_offset = prime_len - qx_len;
	qy_offset = prime_len - qy_len;
	r_offset = prime_len - r_len;
	s_offset = prime_len - s_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
	 * ROUNDUP8(sign len (r and s), public key len (x and y coordinates),
	 * prime len, order len), and the two curve constants a and b).
	 * Sign, public key and order cannot exceed prime length, so together
	 * with the constants these account for 8 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + m_align + (8 * p_align);

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr + r_offset, ecdsa->r.data, r_len);
	dptr += p_align;

	memcpy(dptr + s_offset, ecdsa->s.data, s_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + qx_offset, sess->ec_ctx.q.x.data, qx_len);
	dptr += p_align;

	memcpy(dptr + qy_offset, sess->ec_ctx.q.y.data, qy_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
	dptr += p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = 0;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr for the microcode to write its completion status */
	req->alternate_caddr = (uint64_t *)dptr;
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + 1;
	caddr.dma_addr = mphys + dlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
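
/*
 * Input buffer layout built above (verify returns only a status, so no
 * separate result area is reserved):
 *
 *   fpm_table_iova | r | s | message | order | prime | Qx | Qy | consta | constb
 */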

static __rte_always_inline int __rte_hot
cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
		     struct asym_op_params *params,
		     struct cpt_asym_sess_misc *sess,
		     uint64_t *fpm_iova)
{
	struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
	uint8_t curveid = sess->ec_ctx.curveid;

	if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN)
		cpt_ecdsa_sign_prep(ecdsa, params, fpm_iova[curveid], sess);
	else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
		cpt_ecdsa_verify_prep(ecdsa, params, fpm_iova[curveid],
				      sess);
	else {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}

static __rte_always_inline int
cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
	      struct asym_op_params *asym_params,
	      uint8_t curveid)
{
	struct cpt_request_info *req = asym_params->req;
	phys_addr_t mphys = asym_params->meta_buf;
	uint16_t x1_len = ecpm->p.x.length;
	uint16_t y1_len = ecpm->p.y.length;
	uint16_t scalar_align, p_align;
	uint16_t dlen, rlen, prime_len;
	uint16_t x1_offset, y1_offset;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);

	/*
	 * Set dlen = sum(ROUNDUP8(input point (x and y coordinates), prime,
	 * curve constants a and b), ROUNDUP8(scalar length)).
	 * Note that point coordinate lengths are bounded by the curve prime,
	 * so these components account for 5 * p_align.
	 */
	dlen = 5 * p_align + scalar_align;

	x1_offset = prime_len - x1_len;
	y1_offset = prime_len - y1_len;

	memset(dptr, 0, dlen);

	/* Copy input point, scalar, prime */
	memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
	dptr += p_align;
	memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
	dptr += p_align;
	memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
	dptr += scalar_align;
	memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].consta.data,
	       ec_grp[curveid].consta.length);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data,
	       ec_grp[curveid].constb.length);
	dptr += p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid;
	vq_cmd_w0.s.param2 = ecpm->scalar.length;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result buffer will store the output point, where each coordinate
	 * is of prime length; thus set rlen to twice the prime length.
	 */
	rlen = p_align << 1;
	req->rptr = dptr;

	/* alternate_caddr for the microcode to write its completion status */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
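
/*
 * Input buffer layout built above:
 *
 *   x1 | y1 | scalar | prime | consta | constb
 *
 * The output point is returned at req->rptr as x then y, each coordinate
 * padded to p_align bytes (rlen = 2 * p_align).
 */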
#endif /* _CPT_UCODE_ASYM_H_ */