/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright (C) 2019 Marvell International Ltd.
 */

#ifndef _CPT_UCODE_ASYM_H_
#define _CPT_UCODE_ASYM_H_

#include <rte_common.h>
#include <rte_crypto_asym.h>
#include <rte_malloc.h>

#include "cpt_common.h"
#include "cpt_hw_types.h"
#include "cpt_mcode_defines.h"
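
/*
 * Microcode-level helpers for the Marvell CPT asymmetric crypto PMDs:
 * session parameter setup for modexp, RSA and EC transforms, and
 * request preparation (DPTR/RPTR layout plus the VQ command word) for
 * modexp, RSA, ECDSA and EC point multiplication operations.
 */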

static __rte_always_inline void
cpt_modex_param_normalize(uint8_t **data, size_t *len)
{
	size_t i;

	/* Strip leading NUL bytes */
	for (i = 0; i < *len; i++) {
		if ((*data)[i] != 0)
			break;
	}

	*data += i;
	*len -= i;
}
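
/*
 * Example: a big-endian operand {0x00, 0x00, 0x9A, 0x14} of length 4 is
 * normalized in place to {0x9A, 0x14} of length 2. An all-zero operand
 * ends up with length 0, which cpt_fill_modex_params() below rejects.
 */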

static __rte_always_inline int
cpt_fill_modex_params(struct cpt_asym_sess_misc *sess,
		      struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
	size_t exp_len = xform->modex.exponent.length;
	size_t mod_len = xform->modex.modulus.length;
	uint8_t *exp = xform->modex.exponent.data;
	uint8_t *mod = xform->modex.modulus.data;

	cpt_modex_param_normalize(&mod, &mod_len);
	cpt_modex_param_normalize(&exp, &exp_len);

	if (unlikely(exp_len == 0 || mod_len == 0))
		return -EINVAL;

	if (unlikely(exp_len > mod_len)) {
		CPT_LOG_DP_ERR("Exponent length greater than modulus length is not supported");
		return -ENOTSUP;
	}

	/* Allocate a single buffer to hold both modexp params */
	ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
	if (ctx->modulus.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for modex params");
		return -ENOMEM;
	}

	/* Set up modexp modulus and exponent, packed back to back */
	memcpy(ctx->modulus.data, mod, mod_len);
	ctx->exponent.data = ctx->modulus.data + mod_len;
	memcpy(ctx->exponent.data, exp, exp_len);

	ctx->modulus.length = mod_len;
	ctx->exponent.length = exp_len;

	return 0;
}

static __rte_always_inline int
cpt_fill_rsa_params(struct cpt_asym_sess_misc *sess,
		    struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
	struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
	struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
	size_t mod_len = xfrm_rsa->n.length;
	size_t exp_len = xfrm_rsa->e.length;
	uint64_t total_size;
	size_t len = 0;

	/* Use the CRT key only when its length does not exceed mod_len / 2 */
	if (qt.p.data != NULL && qt.p.length <= mod_len / 2)
		len = qt.p.length;

	/* Total size required for RSA key params (n, e, (q, dQ, p, dP, qInv)) */
	total_size = mod_len + exp_len + 5 * len;

	/* Allocate a single buffer to hold all RSA keys */
	rsa->n.data = rte_malloc(NULL, total_size, 0);
	if (rsa->n.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for RSA keys");
		return -ENOMEM;
	}
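
	/*
	 * Key material layout within the single allocation:
	 *
	 *	n | e | q | dQ | p | dP | qInv
	 *
	 * The CRT quintuple is present only when a usable private key in
	 * quintuple format was supplied (len != 0).
	 */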
	/* Set up RSA modulus and public key exponent */
	memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
	rsa->e.data = rsa->n.data + mod_len;
	memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);

	/* Private key in quintuple format */
	if (len != 0) {
		rsa->qt.q.data = rsa->e.data + exp_len;
		memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
		rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
		memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
		rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
		memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
		rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
		memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
		rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
		memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);

		rsa->qt.q.length = qt.q.length;
		rsa->qt.dQ.length = qt.dQ.length;
		rsa->qt.p.length = qt.p.length;
		rsa->qt.dP.length = qt.dP.length;
		rsa->qt.qInv.length = qt.qInv.length;
	}
	rsa->n.length = mod_len;
	rsa->e.length = exp_len;

	return 0;
}

static __rte_always_inline int
cpt_fill_ec_params(struct cpt_asym_sess_misc *sess,
		   struct rte_crypto_asym_xform *xform)
{
	struct cpt_asym_ec_ctx *ec = &sess->ec_ctx;

	switch (xform->ec.curve_id) {
	case RTE_CRYPTO_EC_GROUP_SECP192R1:
		ec->curveid = CPT_EC_ID_P192;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP224R1:
		ec->curveid = CPT_EC_ID_P224;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP256R1:
		ec->curveid = CPT_EC_ID_P256;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP384R1:
		ec->curveid = CPT_EC_ID_P384;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP521R1:
		ec->curveid = CPT_EC_ID_P521;
		break;
	default:
		/* Only NIST curves (FIPS 186-4) are supported */
		CPT_LOG_DP_ERR("Unsupported curve");
		return -EINVAL;
	}

	return 0;
}

static __rte_always_inline int
cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
				 struct rte_crypto_asym_xform *xform)
{
	int ret;

	sess->xfrm_type = xform->xform_type;

	switch (xform->xform_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		ret = cpt_fill_rsa_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		ret = cpt_fill_modex_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cpt_fill_ec_params(sess, xform);
		break;
	default:
		CPT_LOG_DP_ERR("Unsupported transform type");
		return -ENOTSUP;
	}
	return ret;
}

static __rte_always_inline void
cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_modex_xform *mod;
	struct rte_crypto_rsa_xform *rsa;

	switch (sess->xfrm_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		rsa = &sess->rsa_ctx;
		rte_free(rsa->n.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		mod = &sess->mod_ctx;
		rte_free(mod->modulus.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		break;
	default:
		CPT_LOG_DP_ERR("Invalid transform type");
		break;
	}
}
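
/*
 * Illustrative call sequence (a sketch only, not part of the driver;
 * real PMDs allocate the session from a mempool and fill the xform
 * from the application's configuration):
 *
 *	struct cpt_asym_sess_misc sess;
 *	struct rte_crypto_asym_xform xform = {
 *		.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX,
 *		// modex.modulus / modex.exponent set by the caller
 *	};
 *
 *	if (cpt_fill_asym_session_parameters(&sess, &xform) == 0) {
 *		// ... prepare and enqueue ops against the session ...
 *		cpt_free_asym_session_parameters(&sess);
 *	}
 */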

static __rte_always_inline void
cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
{
	void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);

	/* Pointer to cpt_res_s, updated by CPT */
	req->completion_addr = (volatile uint64_t *)completion_addr;
	req->comp_baddr = addr.dma_addr +
			  RTE_PTR_DIFF(completion_addr, addr.vaddr);
	*(req->completion_addr) = COMPLETION_CODE_INIT;
}

static __rte_always_inline int
cpt_modex_prep(struct asym_op_params *modex_params,
	       struct rte_crypto_modex_xform *mod)
{
	struct cpt_request_info *req = modex_params->req;
	phys_addr_t mphys = modex_params->meta_buf;
	uint32_t exp_len = mod->exponent.length;
	uint32_t mod_len = mod->modulus.length;
	struct rte_crypto_mod_op_param mod_op;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t base_len;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting modex op from params->req->op[1]->asym->modex */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	mod_op = ((struct rte_crypto_op *)*op)->asym->modex;

	base_len = mod_op.base.length;
	if (unlikely(base_len > mod_len)) {
		CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
		(*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -ENOTSUP;
	}

	total_key_len = mod_len + exp_len;

	/* Input buffer: modulus and exponent are contiguous
	 * (see cpt_fill_modex_params)
	 */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, mod->modulus.data, total_key_len);
	dptr += total_key_len;
	memcpy(dptr, mod_op.base.data, base_len);
	dptr += base_len;
	dlen = total_key_len + base_len;

	/* Result buffer */
	rlen = mod_len;

	/* Setup opcodes; zero the word so unset fields are deterministic */
	vq_cmd_w0.u64 = 0;
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.param2 = exp_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
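
/*
 * Modexp DPTR layout produced above (fields packed back to back):
 *
 *	modulus | exponent | base
 *
 * The engine writes the mod_len-byte result at RPTR, directly after
 * the base in the same metadata buffer.
 */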

static __rte_always_inline void
cpt_rsa_prep(struct asym_op_params *rsa_params,
	     struct rte_crypto_rsa_xform *rsa,
	     rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t mod_len = rsa->n.length;
	uint32_t exp_len = rsa->e.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = mod_len + exp_len;

	/* Input buffer: n and e are contiguous (see cpt_fill_rsa_params) */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->n.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	/* Zero the command word so fields unset below are deterministic */
	vq_cmd_w0.u64 = 0;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
		vq_cmd_w0.s.param2 = exp_len;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
			/* Public key encrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
					((uint16_t)(exp_len) << 1);
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
			/* Public key decrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
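
/*
 * RSA public key path DPTR layout produced above:
 *
 *	n | e | input (message or signature)
 *
 * The result (mod_len bytes, plus 2 bytes of decrypted length for the
 * PKCS #1 verify case) is written at RPTR.
 */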

static __rte_always_inline void
cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
		 struct rte_crypto_rsa_xform *rsa,
		 rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	uint32_t qInv_len = rsa->qt.qInv.length;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t dP_len = rsa->qt.dP.length;
	uint32_t dQ_len = rsa->qt.dQ.length;
	uint32_t p_len = rsa->qt.p.length;
	uint32_t q_len = rsa->qt.q.length;
	uint32_t mod_len = rsa->n.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extracting rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;

	/* Input buffer: the CRT quintuple is laid out contiguously
	 * starting at q (see cpt_fill_rsa_params)
	 */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->qt.q.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	/* Zero the command word so fields unset below are deterministic */
	vq_cmd_w0.u64 = 0;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
			/* Private key encrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
			/* Private key decrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
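
/*
 * RSA CRT private key path DPTR layout produced above:
 *
 *	q | dQ | p | dP | qInv | input (message or ciphertext)
 *
 * The result (mod_len bytes, plus 2 bytes of decrypted length for the
 * PKCS #1 decrypt case) is written at RPTR.
 */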

static __rte_always_inline int __rte_hot
cpt_enqueue_rsa_op(struct rte_crypto_op *op,
		   struct asym_op_params *params,
		   struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;

	switch (rsa->op_type) {
	case RTE_CRYPTO_ASYM_OP_VERIFY:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
		break;
	case RTE_CRYPTO_ASYM_OP_ENCRYPT:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_SIGN:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_DECRYPT:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}
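
/*
 * EC group parameters for the supported NIST prime curves (FIPS 186-4),
 * indexed by CPT_EC_ID_* (P-192, P-224, P-256, P-384, P-521). All
 * values are big endian; consta is the curve coefficient a = p - 3 and
 * constb is the coefficient b.
 */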
static const struct cpt_ec_group ec_grp[CPT_EC_ID_PMAX] = {
	{
		.prime = {
				.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
					 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
					 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
					 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF},
				.length = 24,
			},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0x99, 0xDE, 0xF8, 0x36, 0x14, 0x6B,
				   0xC9, 0xB1, 0xB4, 0xD2, 0x28, 0x31},
			  .length = 24},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 24},
		.constb = {.data = {0x64, 0x21, 0x05, 0x19, 0xE5, 0x9C,
				    0x80, 0xE7, 0x0F, 0xA7, 0xE9, 0xAB,
				    0x72, 0x24, 0x30, 0x49, 0xFE, 0xB8,
				    0xDE, 0xEC, 0xC1, 0x46, 0xB9, 0xB1},
			   .length = 24},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01},
			  .length = 28},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0x16, 0xA2, 0xE0, 0xB8, 0xF0, 0x3E, 0x13,
				   0xDD, 0x29, 0x45, 0x5C, 0x5C, 0x2A, 0x3D},
			  .length = 28},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE},
			   .length = 28},
		.constb = {.data = {0xB4, 0x05, 0x0A, 0x85, 0x0C, 0x04, 0xB3,
				    0xAB, 0xF5, 0x41, 0x32, 0x56, 0x50, 0x44,
				    0xB0, 0xB7, 0xD7, 0xBF, 0xD8, 0xBA, 0x27,
				    0x0B, 0x39, 0x43, 0x23, 0x55, 0xFF, 0xB4},
			   .length = 28},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				   0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF},
			  .length = 32},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				   0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xBC, 0xE6, 0xFA, 0xAD, 0xA7,
				   0x17, 0x9E, 0x84, 0xF3, 0xB9, 0xCA, 0xC2,
				   0xFC, 0x63, 0x25, 0x51},
			  .length = 32},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00,
				    0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 32},
		.constb = {.data = {0x5A, 0xC6, 0x35, 0xD8, 0xAA, 0x3A, 0x93,
				    0xE7, 0xB3, 0xEB, 0xBD, 0x55, 0x76, 0x98,
				    0x86, 0xBC, 0x65, 0x1D, 0x06, 0xB0, 0xCC,
				    0x53, 0xB0, 0xF6, 0x3B, 0xCE, 0x3C, 0x3E,
				    0x27, 0xD2, 0x60, 0x4B},
			   .length = 32},
	},
	{
		.prime = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
				   0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				   0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF},
			  .length = 48},
		.order = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				   0xFF, 0xFF, 0xFF, 0xC7, 0x63, 0x4D, 0x81,
				   0xF4, 0x37, 0x2D, 0xDF, 0x58, 0x1A, 0x0D,
				   0xB2, 0x48, 0xB0, 0xA7, 0x7A, 0xEC, 0xEC,
				   0x19, 0x6A, 0xCC, 0xC5, 0x29, 0x73},
			  .length = 48},
		.consta = {.data = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				    0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF,
				    0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				    0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFC},
			   .length = 48},
		.constb = {.data = {0xB3, 0x31, 0x2F, 0xA7, 0xE2, 0x3E, 0xE7,
				    0xE4, 0x98, 0x8E, 0x05, 0x6B, 0xE3, 0xF8,
				    0x2D, 0x19, 0x18, 0x1D, 0x9C, 0x6E, 0xFE,
				    0x81, 0x41, 0x12, 0x03, 0x14, 0x08, 0x8F,
				    0x50, 0x13, 0x87, 0x5A, 0xC6, 0x56, 0x39,
				    0x8D, 0x8A, 0x2E, 0xD1, 0x9D, 0x2A, 0x85,
				    0xC8, 0xED, 0xD3, 0xEC, 0x2A, 0xEF},
			   .length = 48},
	},
	{.prime = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF},
		   .length = 66},
	 .order = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			    0xFF, 0xFA, 0x51, 0x86, 0x87, 0x83, 0xBF, 0x2F,
			    0x96, 0x6B, 0x7F, 0xCC, 0x01, 0x48, 0xF7, 0x09,
			    0xA5, 0xD0, 0x3B, 0xB5, 0xC9, 0xB8, 0x89, 0x9C,
			    0x47, 0xAE, 0xBB, 0x6F, 0xB7, 0x1E, 0x91, 0x38,
			    0x64, 0x09},
		   .length = 66},
	 .consta = {.data = {0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
			     0xFF, 0xFC},
		    .length = 66},
	 .constb = {.data = {0x00, 0x51, 0x95, 0x3E, 0xB9, 0x61, 0x8E, 0x1C,
			     0x9A, 0x1F, 0x92, 0x9A, 0x21, 0xA0, 0xB6, 0x85,
			     0x40, 0xEE, 0xA2, 0xDA, 0x72, 0x5B, 0x99, 0xB3,
			     0x15, 0xF3, 0xB8, 0xB4, 0x89, 0x91, 0x8E, 0xF1,
			     0x09, 0xE1, 0x56, 0x19, 0x39, 0x51, 0xEC, 0x7E,
			     0x93, 0x7B, 0x16, 0x52, 0xC0, 0xBD, 0x3B, 0xB1,
			     0xBF, 0x07, 0x35, 0x73, 0xDF, 0x88, 0x3D, 0x2C,
			     0x34, 0xF1, 0xEF, 0x45, 0x1F, 0xD4, 0x6B, 0x50,
			     0x3F, 0x00},
		    .length = 66}}};

static __rte_always_inline void
cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		    struct asym_op_params *ecdsa_params,
		    uint64_t fpm_table_iova,
		    uint8_t curveid)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint16_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t pkey_len = ecdsa->pkey.length;
	uint16_t p_align, k_align, m_align;
	uint16_t k_len = ecdsa->k.length;
	uint16_t order_len, prime_len;
	uint16_t o_offset, pk_offset;
	vq_cmd_word0_t vq_cmd_w0;
	uint16_t rlen, dlen;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;
	m_align = RTE_ALIGN_CEIL(message_len, 8);

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	k_align = RTE_ALIGN_CEIL(k_len, 8);

	/* Set write offset for order and private key */
	o_offset = prime_len - order_len;
	pk_offset = prime_len - pkey_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
	 * ROUNDUP8(prime len, order len, priv key len, consta len,
	 * constb len)).
	 * Note that the private key and order cannot exceed the prime
	 * length, so the five prime-sized fields account for 5 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 5;

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr, ecdsa->k.data, k_len);
	dptr += k_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
	dptr += p_align;

	/* 2 * prime length (for sign r and s) */
	rlen = 2 * p_align;

	/* Setup opcodes; zero the word so unset fields are deterministic */
	vq_cmd_w0.u64 = 0;
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = (pkey_len << 8) | k_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
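
/*
 * ECDSA sign DPTR layout produced above, one 8-byte aligned slot per
 * field, with order and private key right-justified to the prime
 * length:
 *
 *	fpm address | k | prime | order | private key | message | a | b
 *
 * The signature is written at RPTR as r | s, one p_align slot each.
 */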

static __rte_always_inline void
cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		      struct asym_op_params *ecdsa_params,
		      uint64_t fpm_table_iova,
		      uint8_t curveid)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint32_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t o_offset, r_offset, s_offset;
	uint16_t qx_len = ecdsa->q.x.length;
	uint16_t qy_len = ecdsa->q.y.length;
	uint16_t r_len = ecdsa->r.length;
	uint16_t s_len = ecdsa->s.length;
	uint16_t order_len, prime_len;
	uint16_t qx_offset, qy_offset;
	uint16_t p_align, m_align;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint16_t dlen;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;

	m_align = RTE_ALIGN_CEIL(message_len, 8);
	p_align = RTE_ALIGN_CEIL(prime_len, 8);

	/* Set write offset for sign, order and public key coordinates */
	o_offset = prime_len - order_len;
	qx_offset = prime_len - qx_len;
	qy_offset = prime_len - qy_len;
	r_offset = prime_len - r_len;
	s_offset = prime_len - s_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
	 * ROUNDUP8(sign len (r and s), public key len (x and y coordinates),
	 * prime len, order len, consta len, constb len)).
	 * Note that the sign, public key and order cannot exceed the prime
	 * length, so the prime-sized fields account for 8 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + m_align + (8 * p_align);

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr + r_offset, ecdsa->r.data, r_len);
	dptr += p_align;

	memcpy(dptr + s_offset, ecdsa->s.data, s_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
	dptr += p_align;

	memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].consta.data, prime_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data, prime_len);
	dptr += p_align;

	/* Setup opcodes; zero the word so unset fields are deterministic */
	vq_cmd_w0.u64 = 0;
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = 0;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)dptr;
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + 1;
	caddr.dma_addr = mphys + dlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
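
/*
 * ECDSA verify DPTR layout produced above, with short fields
 * right-justified to the prime length:
 *
 *	fpm address | r | s | message | order | prime | Qx | Qy | a | b
 *
 * Verify produces no result data; only the completion and alternate
 * completion codes are written back.
 */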

static __rte_always_inline int __rte_hot
cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
		     struct asym_op_params *params,
		     struct cpt_asym_sess_misc *sess,
		     uint64_t *fpm_iova)
{
	struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
	uint8_t curveid = sess->ec_ctx.curveid;

	if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
		cpt_ecdsa_sign_prep(ecdsa, params, fpm_iova[curveid], curveid);
	} else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
		cpt_ecdsa_verify_prep(ecdsa, params, fpm_iova[curveid],
				      curveid);
	} else {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}

static __rte_always_inline int
cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
	      struct asym_op_params *asym_params,
	      uint8_t curveid)
{
	struct cpt_request_info *req = asym_params->req;
	phys_addr_t mphys = asym_params->meta_buf;
	uint16_t x1_len = ecpm->p.x.length;
	uint16_t y1_len = ecpm->p.y.length;
	uint16_t scalar_align, p_align;
	uint16_t dlen, rlen, prime_len;
	uint16_t x1_offset, y1_offset;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);

	/*
	 * Set dlen = sum(ROUNDUP8(input point (x and y coordinates), prime,
	 * consta, constb, scalar length)).
	 * Note that the point coordinate lengths are bounded by the prime
	 * length of the curve, so the prime-sized fields account for
	 * 5 * p_align.
	 */
	dlen = 5 * p_align + scalar_align;

	x1_offset = prime_len - x1_len;
	y1_offset = prime_len - y1_len;

	memset(dptr, 0, dlen);

	/* Copy input point, scalar, prime and curve constants */
	memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
	dptr += p_align;
	memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
	dptr += p_align;
	memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
	dptr += scalar_align;
	memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].consta.data,
	       ec_grp[curveid].consta.length);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].constb.data,
	       ec_grp[curveid].constb.length);
	dptr += p_align;

	/* Setup opcodes; zero the word so unset fields are deterministic */
	vq_cmd_w0.u64 = 0;
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid;
	vq_cmd_w0.s.param2 = ecpm->scalar.length;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* The result buffer stores the output point, where each
	 * coordinate is of prime length; set rlen to twice the
	 * aligned prime length.
	 */
	rlen = p_align << 1;
	req->rptr = dptr;

	/* alternate_caddr to write completion status by the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
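
/*
 * ECPM DPTR layout produced above, coordinates right-justified to the
 * prime length:
 *
 *	Px | Py | scalar | prime | a | b
 *
 * The output point is written at RPTR as x | y, one p_align slot per
 * coordinate.
 */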
#endif /* _CPT_UCODE_ASYM_H_ */