/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright (C) 2019 Marvell International Ltd.
 */

#ifndef _CPT_UCODE_ASYM_H_
#define _CPT_UCODE_ASYM_H_

#include <rte_common.h>
#include <rte_crypto_asym.h>
#include <rte_malloc.h>

#include "cpt_common.h"
#include "cpt_hw_types.h"
#include "cpt_mcode_defines.h"

static __rte_always_inline void
cpt_modex_param_normalize(uint8_t **data, size_t *len)
{
	size_t i;

	/* Strip leading zero bytes */
	for (i = 0; i < *len; i++) {
		if ((*data)[i] != 0)
			break;
	}

	*data += i;
	*len -= i;
}
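
/*
 * Illustrative sketch (not part of the driver): operands arrive big endian,
 * so normalization drops leading zero bytes before lengths are validated.
 * CPT_UCODE_ASYM_EXAMPLES is a hypothetical guard macro, so the example is
 * compiled out by default.
 */
#ifdef CPT_UCODE_ASYM_EXAMPLES
static __rte_unused void
cpt_modex_param_normalize_example(void)
{
	uint8_t buf[] = {0x00, 0x00, 0x01, 0x02};
	uint8_t *data = buf;
	size_t len = sizeof(buf);

	cpt_modex_param_normalize(&data, &len);
	/* data now points at buf[2] and len == 2 */
}
#endif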

static __rte_always_inline int
cpt_fill_modex_params(struct cpt_asym_sess_misc *sess,
		      struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_modex_xform *ctx = &sess->mod_ctx;
	size_t exp_len = xform->modex.exponent.length;
	size_t mod_len = xform->modex.modulus.length;
	uint8_t *exp = xform->modex.exponent.data;
	uint8_t *mod = xform->modex.modulus.data;

	cpt_modex_param_normalize(&mod, &mod_len);
	cpt_modex_param_normalize(&exp, &exp_len);

	if (unlikely(exp_len == 0 || mod_len == 0))
		return -EINVAL;

	if (unlikely(exp_len > mod_len)) {
		CPT_LOG_DP_ERR("Exponent length greater than modulus length is not supported");
		return -ENOTSUP;
	}

	/* Allocate a single buffer to hold both modexp params */
	ctx->modulus.data = rte_malloc(NULL, mod_len + exp_len, 0);
	if (ctx->modulus.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for modex params");
		return -ENOMEM;
	}

	/* Set up modexp modulus and exponent */
	memcpy(ctx->modulus.data, mod, mod_len);
	ctx->exponent.data = ctx->modulus.data + mod_len;
	memcpy(ctx->exponent.data, exp, exp_len);

	ctx->modulus.length = mod_len;
	ctx->exponent.length = exp_len;

	return 0;
}
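
/*
 * Example sketch, compiled out by default (hypothetical
 * CPT_UCODE_ASYM_EXAMPLES guard): filling a modexp session from an
 * application-built xform. The operand bytes are illustrative only.
 */
#ifdef CPT_UCODE_ASYM_EXAMPLES
static __rte_unused int
cpt_fill_modex_params_example(struct cpt_asym_sess_misc *sess)
{
	uint8_t mod[] = {0xC7, 0x35, 0xAA, 0x0D};	/* modulus, big endian */
	uint8_t exp[] = {0x01, 0x00, 0x01};		/* exponent = 65537 */
	struct rte_crypto_asym_xform xform;

	memset(&xform, 0, sizeof(xform));
	xform.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX;
	xform.modex.modulus.data = mod;
	xform.modex.modulus.length = sizeof(mod);
	xform.modex.exponent.data = exp;
	xform.modex.exponent.length = sizeof(exp);

	/* Operands are copied into the session, stack buffers are fine */
	return cpt_fill_modex_params(sess, &xform);
}
#endif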

static __rte_always_inline int
cpt_fill_rsa_params(struct cpt_asym_sess_misc *sess,
		    struct rte_crypto_asym_xform *xform)
{
	struct rte_crypto_rsa_priv_key_qt qt = xform->rsa.qt;
	struct rte_crypto_rsa_xform *xfrm_rsa = &xform->rsa;
	struct rte_crypto_rsa_xform *rsa = &sess->rsa_ctx;
	size_t mod_len = xfrm_rsa->n.length;
	size_t exp_len = xfrm_rsa->e.length;
	uint64_t total_size;
	size_t len = 0;

	/* Make sure the key length used is not more than mod_len / 2 */
	if (qt.p.data != NULL)
		len = (((mod_len / 2) < qt.p.length) ? 0 : qt.p.length);

	/* Total size required for RSA key params (n, e, (q, dQ, p, dP, qInv)) */
	total_size = mod_len + exp_len + 5 * len;

	/* Allocate buffer to hold all RSA keys */
	rsa->n.data = rte_malloc(NULL, total_size, 0);
	if (rsa->n.data == NULL) {
		CPT_LOG_DP_ERR("Could not allocate buffer for RSA keys");
		return -ENOMEM;
	}

	/* Set up RSA modulus and public key exponent */
	memcpy(rsa->n.data, xfrm_rsa->n.data, mod_len);
	rsa->e.data = rsa->n.data + mod_len;
	memcpy(rsa->e.data, xfrm_rsa->e.data, exp_len);

	/* Private key in quintuple format */
	if (len != 0) {
		rsa->qt.q.data = rsa->e.data + exp_len;
		memcpy(rsa->qt.q.data, qt.q.data, qt.q.length);
		rsa->qt.dQ.data = rsa->qt.q.data + qt.q.length;
		memcpy(rsa->qt.dQ.data, qt.dQ.data, qt.dQ.length);
		rsa->qt.p.data = rsa->qt.dQ.data + qt.dQ.length;
		memcpy(rsa->qt.p.data, qt.p.data, qt.p.length);
		rsa->qt.dP.data = rsa->qt.p.data + qt.p.length;
		memcpy(rsa->qt.dP.data, qt.dP.data, qt.dP.length);
		rsa->qt.qInv.data = rsa->qt.dP.data + qt.dP.length;
		memcpy(rsa->qt.qInv.data, qt.qInv.data, qt.qInv.length);

		rsa->qt.q.length = qt.q.length;
		rsa->qt.dQ.length = qt.dQ.length;
		rsa->qt.p.length = qt.p.length;
		rsa->qt.dP.length = qt.dP.length;
		rsa->qt.qInv.length = qt.qInv.length;
	}
	rsa->n.length = mod_len;
	rsa->e.length = exp_len;

	return 0;
}
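
/*
 * Layout of the key buffer packed by cpt_fill_rsa_params() (one rte_malloc'd
 * region; the CRT members are present only when a quintuple key was given):
 *
 *   n.data -> | n (mod_len) | e (exp_len) | q | dQ | p | dP | qInv |
 *
 * Each CRT component is at most mod_len / 2 bytes, hence the 5 * len
 * reservation in total_size.
 */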

static __rte_always_inline int
cpt_fill_ec_params(struct cpt_asym_sess_misc *sess,
		   struct rte_crypto_asym_xform *xform)
{
	struct cpt_asym_ec_ctx *ec = &sess->ec_ctx;

	switch (xform->ec.curve_id) {
	case RTE_CRYPTO_EC_GROUP_SECP192R1:
		ec->curveid = CPT_EC_ID_P192;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP224R1:
		ec->curveid = CPT_EC_ID_P224;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP256R1:
		ec->curveid = CPT_EC_ID_P256;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP384R1:
		ec->curveid = CPT_EC_ID_P384;
		break;
	case RTE_CRYPTO_EC_GROUP_SECP521R1:
		ec->curveid = CPT_EC_ID_P521;
		break;
	default:
		/* Only NIST curves (FIPS 186-4) are supported */
		CPT_LOG_DP_ERR("Unsupported curve");
		return -EINVAL;
	}

	return 0;
}

static __rte_always_inline int
cpt_fill_asym_session_parameters(struct cpt_asym_sess_misc *sess,
				 struct rte_crypto_asym_xform *xform)
{
	int ret;

	sess->xfrm_type = xform->xform_type;

	switch (xform->xform_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		ret = cpt_fill_rsa_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		ret = cpt_fill_modex_params(sess, xform);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		ret = cpt_fill_ec_params(sess, xform);
		break;
	default:
		CPT_LOG_DP_ERR("Unsupported transform type");
		return -ENOTSUP;
	}
	return ret;
}

static __rte_always_inline void
cpt_free_asym_session_parameters(struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_modex_xform *mod;
	struct rte_crypto_rsa_xform *rsa;

	switch (sess->xfrm_type) {
	case RTE_CRYPTO_ASYM_XFORM_RSA:
		rsa = &sess->rsa_ctx;
		/* rte_free() is a no-op on NULL, no check needed */
		rte_free(rsa->n.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_MODEX:
		mod = &sess->mod_ctx;
		rte_free(mod->modulus.data);
		break;
	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
		/* Fall through */
	case RTE_CRYPTO_ASYM_XFORM_ECPM:
		break;
	default:
		CPT_LOG_DP_ERR("Invalid transform type");
		break;
	}
}
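
/*
 * Example sketch, compiled out by default (hypothetical
 * CPT_UCODE_ASYM_EXAMPLES guard): the expected fill/free pairing around a
 * session's lifetime. The fill helper allocates; the free helper releases.
 */
#ifdef CPT_UCODE_ASYM_EXAMPLES
static __rte_unused int
cpt_asym_session_lifecycle_example(struct cpt_asym_sess_misc *sess,
				   struct rte_crypto_asym_xform *xform)
{
	int ret;

	ret = cpt_fill_asym_session_parameters(sess, xform);
	if (ret)
		return ret;

	/* ... enqueue and dequeue asymmetric ops against the session ... */

	cpt_free_asym_session_parameters(sess);
	return 0;
}
#endif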

static __rte_always_inline void
cpt_fill_req_comp_addr(struct cpt_request_info *req, buf_ptr_t addr)
{
	void *completion_addr = RTE_PTR_ALIGN(addr.vaddr, 16);

	/* Pointer to cpt_res_s, updated by CPT */
	req->completion_addr = (volatile uint64_t *)completion_addr;
	req->comp_baddr = addr.dma_addr +
			  RTE_PTR_DIFF(completion_addr, addr.vaddr);
	*(req->completion_addr) = COMPLETION_CODE_INIT;
}
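
/*
 * Note: the completion word must be 16 byte aligned for cpt_res_s, so the
 * virtual address is rounded up and the same delta is applied to the DMA
 * address to keep both views in sync. For example, a vaddr whose low nibble
 * is 0x9 yields completion_addr at the next 0x10 boundary and comp_baddr at
 * dma_addr + 7.
 */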

static __rte_always_inline int
cpt_modex_prep(struct asym_op_params *modex_params,
	       struct rte_crypto_modex_xform *mod)
{
	struct cpt_request_info *req = modex_params->req;
	phys_addr_t mphys = modex_params->meta_buf;
	uint32_t exp_len = mod->exponent.length;
	uint32_t mod_len = mod->modulus.length;
	struct rte_crypto_mod_op_param mod_op;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t base_len;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extract the modex op from params->req->op[1]->asym->modex */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	mod_op = ((struct rte_crypto_op *)*op)->asym->modex;

	base_len = mod_op.base.length;
	if (unlikely(base_len > mod_len)) {
		CPT_LOG_DP_ERR("Base length greater than modulus length is not supported");
		(*op)->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -ENOTSUP;
	}

	total_key_len = mod_len + exp_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, mod->modulus.data, total_key_len);
	dptr += total_key_len;
	memcpy(dptr, mod_op.base.data, base_len);
	dptr += base_len;
	dlen = total_key_len + base_len;

	/* Result buffer */
	rlen = mod_len;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.param2 = exp_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
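
/*
 * Input buffer assembled by cpt_modex_prep(), contiguous after the
 * cpt_request_info structure (modulus and exponent were packed back to back
 * by cpt_fill_modex_params(), so one memcpy covers both):
 *
 *   dptr -> | modulus (mod_len) | exponent (exp_len) | base (base_len) |
 *
 * followed by the result buffer (rlen = mod_len) and the completion markers
 * set up by cpt_fill_req_comp_addr().
 */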

static __rte_always_inline void
cpt_rsa_prep(struct asym_op_params *rsa_params,
	     struct rte_crypto_rsa_xform *rsa,
	     rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t mod_len = rsa->n.length;
	uint32_t exp_len = rsa->e.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extract the rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = mod_len + exp_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->n.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX;
		vq_cmd_w0.s.param2 = exp_len;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC;
			/* Public key encrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2 |
					((uint16_t)(exp_len) << 1);
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC;
			/* Public key decrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}

static __rte_always_inline void
cpt_rsa_crt_prep(struct asym_op_params *rsa_params,
		 struct rte_crypto_rsa_xform *rsa,
		 rte_crypto_param *crypto_param)
{
	struct cpt_request_info *req = rsa_params->req;
	phys_addr_t mphys = rsa_params->meta_buf;
	uint32_t qInv_len = rsa->qt.qInv.length;
	struct rte_crypto_rsa_op_param rsa_op;
	uint32_t dP_len = rsa->qt.dP.length;
	uint32_t dQ_len = rsa->qt.dQ.length;
	uint32_t p_len = rsa->qt.p.length;
	uint32_t q_len = rsa->qt.q.length;
	uint32_t mod_len = rsa->n.length;
	struct rte_crypto_op **op;
	vq_cmd_word0_t vq_cmd_w0;
	uint64_t total_key_len;
	uint32_t dlen, rlen;
	uint32_t in_size;
	buf_ptr_t caddr;
	uint8_t *dptr;

	/* Extract the rsa op from params->req->op[1]->asym->rsa */
	op = RTE_PTR_ADD(req->op, sizeof(uintptr_t));
	rsa_op = ((struct rte_crypto_op *)*op)->asym->rsa;
	total_key_len = p_len + q_len + dP_len + dQ_len + qInv_len;

	/* Input buffer (the quintuple is packed contiguously, starting at q) */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));
	memcpy(dptr, rsa->qt.q.data, total_key_len);
	dptr += total_key_len;

	in_size = crypto_param->length;
	memcpy(dptr, crypto_param->data, in_size);

	dptr += in_size;
	dlen = total_key_len + in_size;

	/* Result buffer */
	rlen = mod_len;

	if (rsa_op.pad == RTE_CRYPTO_RSA_PADDING_NONE) {
		/* Use mod_exp operation for no_padding type */
		vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_MODEX_CRT;
	} else {
		if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_ENC_CRT;
			/* Private key encrypt, use BT1 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE1;
		} else if (rsa_op.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
			vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_PKCS_DEC_CRT;
			/* Private key decrypt, use BT2 */
			vq_cmd_w0.s.param2 = CPT_BLOCK_TYPE2;
			/* + 2 for decrypted len */
			rlen += 2;
		}
	}

	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_MODEX;

	/* GP op header */
	vq_cmd_w0.s.param1 = mod_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}

static __rte_always_inline int __rte_hot
cpt_enqueue_rsa_op(struct rte_crypto_op *op,
		   struct asym_op_params *params,
		   struct cpt_asym_sess_misc *sess)
{
	struct rte_crypto_rsa_op_param *rsa = &op->asym->rsa;

	switch (rsa->op_type) {
	case RTE_CRYPTO_ASYM_OP_VERIFY:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->sign);
		break;
	case RTE_CRYPTO_ASYM_OP_ENCRYPT:
		cpt_rsa_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_SIGN:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->message);
		break;
	case RTE_CRYPTO_ASYM_OP_DECRYPT:
		cpt_rsa_crt_prep(params, &sess->rsa_ctx, &rsa->cipher);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}
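
/*
 * Dispatch rationale: VERIFY and ENCRYPT use the public key (n, e) and are
 * prepared by cpt_rsa_prep(), while SIGN and DECRYPT use the private key and
 * are prepared by the CRT variant, which consumes the quintuple
 * (q, dQ, p, dP, qInv) packed by cpt_fill_rsa_params().
 */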

static const struct cpt_ec_group ec_grp[CPT_EC_ID_PMAX] = {
	{
		/* P-192 (SECP192R1) */
		.prime = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
			},
			.length = 24,
		},
		.order = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0x99, 0xDE, 0xF8, 0x36,
				0x14, 0x6B, 0xC9, 0xB1, 0xB4, 0xD2, 0x28, 0x31
			},
			.length = 24,
		},
	},
	{
		/* P-224 (SECP224R1) */
		.prime = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				0x00, 0x00, 0x00, 0x01
			},
			.length = 28,
		},
		.order = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x16, 0xA2,
				0xE0, 0xB8, 0xF0, 0x3E, 0x13, 0xDD, 0x29, 0x45,
				0x5C, 0x5C, 0x2A, 0x3D
			},
			.length = 28,
		},
	},
	{
		/* P-256 (SECP256R1) */
		.prime = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
				0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
				0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF
			},
			.length = 32,
		},
		.order = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xBC, 0xE6, 0xFA, 0xAD, 0xA7, 0x17, 0x9E, 0x84,
				0xF3, 0xB9, 0xCA, 0xC2, 0xFC, 0x63, 0x25, 0x51
			},
			.length = 32,
		},
	},
	{
		/* P-384 (SECP384R1) */
		.prime = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE,
				0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
				0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF
			},
			.length = 48,
		},
		.order = {
			.data = {
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xC7, 0x63, 0x4D, 0x81, 0xF4, 0x37, 0x2D, 0xDF,
				0x58, 0x1A, 0x0D, 0xB2, 0x48, 0xB0, 0xA7, 0x7A,
				0xEC, 0xEC, 0x19, 0x6A, 0xCC, 0xC5, 0x29, 0x73
			},
			.length = 48,
		},
	},
	{
		/* P-521 (SECP521R1) */
		.prime = {
			.data = {
				0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF
			},
			.length = 66,
		},
		.order = {
			.data = {
				0x01, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
				0xFF, 0xFA, 0x51, 0x86, 0x87, 0x83, 0xBF, 0x2F,
				0x96, 0x6B, 0x7F, 0xCC, 0x01, 0x48, 0xF7, 0x09,
				0xA5, 0xD0, 0x3B, 0xB5, 0xC9, 0xB8, 0x89, 0x9C,
				0x47, 0xAE, 0xBB, 0x6F, 0xB7, 0x1E, 0x91, 0x38,
				0x64, 0x09
			},
			.length = 66,
		},
	},
};

static __rte_always_inline void
cpt_ecdsa_sign_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		    struct asym_op_params *ecdsa_params,
		    uint64_t fpm_table_iova,
		    uint8_t curveid)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint16_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t pkey_len = ecdsa->pkey.length;
	uint16_t p_align, k_align, m_align;
	uint16_t k_len = ecdsa->k.length;
	uint16_t order_len, prime_len;
	uint16_t o_offset, pk_offset;
	vq_cmd_word0_t vq_cmd_w0;
	uint16_t rlen, dlen;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;
	m_align = RTE_ALIGN_CEIL(message_len, 8);

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	k_align = RTE_ALIGN_CEIL(k_len, 8);

	/* Set write offset for order and private key */
	o_offset = prime_len - order_len;
	pk_offset = prime_len - pkey_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(scalar len, input len),
	 * ROUNDUP8(priv key len, prime len, order len)).
	 * Note that the private key and order cannot exceed the prime length,
	 * i.e. 3 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + k_align + m_align + p_align * 3;

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr, ecdsa->k.data, k_len);
	dptr += k_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr + pk_offset, ecdsa->pkey.data, pkey_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	/* 2 * prime length (for sign r and s) */
	rlen = 2 * p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_SIGN;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = k_len;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
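
/*
 * Input buffer assembled by cpt_ecdsa_sign_prep(); every field sits in a
 * zero padded, 8 byte aligned slot, and order/private key are right aligned
 * within a prime sized slot:
 *
 *   dptr -> | fpm iova (8) | k (k_align) | prime | order | pkey | message |
 */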

static __rte_always_inline void
cpt_ecdsa_verify_prep(struct rte_crypto_ecdsa_op_param *ecdsa,
		      struct asym_op_params *ecdsa_params,
		      uint64_t fpm_table_iova,
		      uint8_t curveid)
{
	struct cpt_request_info *req = ecdsa_params->req;
	uint32_t message_len = ecdsa->message.length;
	phys_addr_t mphys = ecdsa_params->meta_buf;
	uint16_t o_offset, r_offset, s_offset;
	uint16_t qx_len = ecdsa->q.x.length;
	uint16_t qy_len = ecdsa->q.y.length;
	uint16_t r_len = ecdsa->r.length;
	uint16_t s_len = ecdsa->s.length;
	uint16_t order_len, prime_len;
	uint16_t qx_offset, qy_offset;
	uint16_t p_align, m_align;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint16_t dlen;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;
	order_len = ec_grp[curveid].order.length;

	/* Truncate input length to curve prime length */
	if (message_len > prime_len)
		message_len = prime_len;

	m_align = RTE_ALIGN_CEIL(message_len, 8);
	p_align = RTE_ALIGN_CEIL(prime_len, 8);

	/* Set write offset for sign, order and public key coordinates */
	o_offset = prime_len - order_len;
	qx_offset = prime_len - qx_len;
	qy_offset = prime_len - qy_len;
	r_offset = prime_len - r_len;
	s_offset = prime_len - s_len;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	/*
	 * Set dlen = sum(sizeof(fpm address), ROUNDUP8(message len),
	 * ROUNDUP8(sign len (r and s), public key len (x and y coordinates),
	 * prime len, order len)).
	 * Note that the sign, public key and order cannot exceed the prime
	 * length, i.e. 6 * p_align.
	 */
	dlen = sizeof(fpm_table_iova) + m_align + (6 * p_align);

	memset(dptr, 0, dlen);

	*(uint64_t *)dptr = fpm_table_iova;
	dptr += sizeof(fpm_table_iova);

	memcpy(dptr + r_offset, ecdsa->r.data, r_len);
	dptr += p_align;

	memcpy(dptr + s_offset, ecdsa->s.data, s_len);
	dptr += p_align;

	memcpy(dptr, ecdsa->message.data, message_len);
	dptr += m_align;

	memcpy(dptr + o_offset, ec_grp[curveid].order.data, order_len);
	dptr += p_align;

	memcpy(dptr, ec_grp[curveid].prime.data, prime_len);
	dptr += p_align;

	memcpy(dptr + qx_offset, ecdsa->q.x.data, qx_len);
	dptr += p_align;

	memcpy(dptr + qy_offset, ecdsa->q.y.data, qy_len);
	dptr += p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECDSA;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECDSA_VERIFY;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid | (message_len << 8);
	vq_cmd_w0.s.param2 = 0;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* Result pointer to store result data */
	req->rptr = dptr;

	/* alternate_caddr to write completion status of the microcode */
	req->alternate_caddr = (uint64_t *)dptr;
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + 1;
	caddr.dma_addr = mphys + dlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
}
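
/*
 * Input buffer assembled by cpt_ecdsa_verify_prep(); the sign components,
 * order and public key coordinates are right aligned within prime sized
 * slots:
 *
 *   dptr -> | fpm iova (8) | r | s | message | order | prime | qx | qy |
 */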

static __rte_always_inline int __rte_hot
cpt_enqueue_ecdsa_op(struct rte_crypto_op *op,
		     struct asym_op_params *params,
		     struct cpt_asym_sess_misc *sess,
		     uint64_t *fpm_iova)
{
	struct rte_crypto_ecdsa_op_param *ecdsa = &op->asym->ecdsa;
	uint8_t curveid = sess->ec_ctx.curveid;

	if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
		cpt_ecdsa_sign_prep(ecdsa, params, fpm_iova[curveid], curveid);
	} else if (ecdsa->op_type == RTE_CRYPTO_ASYM_OP_VERIFY) {
		cpt_ecdsa_verify_prep(ecdsa, params, fpm_iova[curveid],
				      curveid);
	} else {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return -EINVAL;
	}
	return 0;
}

static __rte_always_inline int
cpt_ecpm_prep(struct rte_crypto_ecpm_op_param *ecpm,
	      struct asym_op_params *asym_params,
	      uint8_t curveid)
{
	struct cpt_request_info *req = asym_params->req;
	phys_addr_t mphys = asym_params->meta_buf;
	uint16_t x1_len = ecpm->p.x.length;
	uint16_t y1_len = ecpm->p.y.length;
	uint16_t scalar_align, p_align;
	uint16_t dlen, rlen, prime_len;
	uint16_t x1_offset, y1_offset;
	vq_cmd_word0_t vq_cmd_w0;
	buf_ptr_t caddr;
	uint8_t *dptr;

	prime_len = ec_grp[curveid].prime.length;

	/* Input buffer */
	dptr = RTE_PTR_ADD(req, sizeof(struct cpt_request_info));

	p_align = RTE_ALIGN_CEIL(prime_len, 8);
	scalar_align = RTE_ALIGN_CEIL(ecpm->scalar.length, 8);

	/*
	 * Set dlen = sum(ROUNDUP8(x and y coordinates of the input point,
	 * prime, scalar length)).
	 * Note that the point length is equivalent to the prime length of
	 * the curve.
	 */
	dlen = 3 * p_align + scalar_align;

	x1_offset = prime_len - x1_len;
	y1_offset = prime_len - y1_len;

	memset(dptr, 0, dlen);

	/* Copy input point, scalar, prime */
	memcpy(dptr + x1_offset, ecpm->p.x.data, x1_len);
	dptr += p_align;
	memcpy(dptr + y1_offset, ecpm->p.y.data, y1_len);
	dptr += p_align;
	memcpy(dptr, ecpm->scalar.data, ecpm->scalar.length);
	dptr += scalar_align;
	memcpy(dptr, ec_grp[curveid].prime.data, ec_grp[curveid].prime.length);
	dptr += p_align;

	/* Setup opcodes */
	vq_cmd_w0.s.opcode.major = CPT_MAJOR_OP_ECC;
	vq_cmd_w0.s.opcode.minor = CPT_MINOR_OP_ECC_UMP;

	/* GP op header */
	vq_cmd_w0.s.param1 = curveid;
	vq_cmd_w0.s.param2 = ecpm->scalar.length;
	vq_cmd_w0.s.dlen = dlen;

	/* Filling cpt_request_info structure */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei1 = mphys;
	req->ist.ei2 = mphys + dlen;

	/* The result buffer holds the output point; each coordinate is of
	 * prime length, so set rlen to twice the prime length.
	 */
	rlen = p_align << 1;
	req->rptr = dptr;

	/* alternate_caddr to write completion status by the microcode */
	req->alternate_caddr = (uint64_t *)(dptr + rlen);
	*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);

	/* Preparing completion addr, +1 for completion code */
	caddr.vaddr = dptr + rlen + 1;
	caddr.dma_addr = mphys + dlen + rlen + 1;

	cpt_fill_req_comp_addr(req, caddr);
	return 0;
}
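
/*
 * Input buffer assembled by cpt_ecpm_prep(); the point coordinates are right
 * aligned within prime sized slots:
 *
 *   dptr -> | x1 | y1 | scalar (scalar_align) | prime |
 *
 * The result holds the output point, one prime sized slot per coordinate.
 */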
#endif /* _CPT_UCODE_ASYM_H_ */