xref: /dpdk/drivers/crypto/qat/qat_asym.c (revision 20e633b0ca15539b682539a665e8d3dc0dc2c899)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2019 - 2022 Intel Corporation
3  */
4 
5 #include <stdarg.h>
6 
7 #include <cryptodev_pmd.h>
8 
9 #include "qat_device.h"
10 #include "qat_logs.h"
11 
12 #include "qat_asym.h"
13 #include "icp_qat_fw_pke.h"
14 #include "icp_qat_fw.h"
15 #include "qat_pke.h"
16 #include "qat_ec.h"
17 
18 #define ASYM_ENQ_THRESHOLD_NAME "qat_asym_enq_threshold"
19 #define RSA_MODULUS_2048_BITS 2048
20 
21 uint8_t qat_asym_driver_id;
22 
23 struct qat_crypto_gen_dev_ops qat_asym_gen_dev_ops[QAT_N_GENS];
24 
25 
/* Devargs recognized by the asym PMD; NULL-terminated for the parser. */
static const char *const arguments[] = {
	ASYM_ENQ_THRESHOLD_NAME,
	NULL
};
30 
31 /* An rte_driver is needed in the registration of both the device and the driver
32  * with cryptodev.
33  * The actual qat pci's rte_driver can't be used as its name represents
34  * the whole pci device with all services. Think of this as a holder for a name
35  * for the crypto part of the pci device.
36  */
static const char qat_asym_drv_name[] = RTE_STR(CRYPTODEV_NAME_QAT_ASYM_PMD);
/* Name/alias holder registered with cryptodev for the asym service only. */
static const struct rte_driver cryptodev_qat_asym_driver = {
	.name = qat_asym_drv_name,
	.alias = qat_asym_drv_name
};
42 
/*
 * Macros with suffix _F are used with some of predefined identifiers:
 * - cookie->input_buffer
 * - qat_func_alignsize
 */
#if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
/* Dump `size` bytes at `where`, tagged `name` (debug data-path builds only). */
#define HEXDUMP(name, where, size) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
			where, size)
/* Dump the idx-th fixed-size slot of a flat buffer `where`. */
#define HEXDUMP_OFF(name, where, size, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
			&where[idx * size], size)

/* As HEXDUMP_OFF, but implicitly addressing qat_func_alignsize-byte slots
 * of cookie->input_buffer (both identifiers must be in the caller's scope).
 */
#define HEXDUMP_OFF_F(name, idx) QAT_DP_HEXDUMP_LOG(DEBUG, name, \
			&cookie->input_buffer[idx * qat_func_alignsize], \
			qat_func_alignsize)
#else
/* Hexdumps compile away entirely below DEBUG data-path log level. */
#define HEXDUMP(name, where, size)
#define HEXDUMP_OFF(name, where, size, idx)
#define HEXDUMP_OFF_F(name, idx)
#endif
62 
/* Validate an asym parameter: reject zero length and all-zero content.
 * `name`/`pname` are string literals used only in the error log; on failure
 * the macro logs and sets `status` to -EINVAL (it does not return).
 */
#define CHECK_IF_NOT_EMPTY(param, name, pname, status) \
	do { \
		if (param.length == 0) {	\
			QAT_LOG(ERR,			\
				"Invalid " name	\
				" input parameter, zero length " pname	\
			);	\
			status = -EINVAL;	\
		} else if (check_zero(param)) { \
			QAT_LOG(ERR,	\
				"Invalid " name " input parameter, empty " \
				pname ", length = %d", \
				(int)param.length \
			); \
			status = -EINVAL;	\
		} \
	} while (0)
80 
/* Copy `what` right-aligned into input_array[idx] of `how` bytes
 * (big-endian operands are effectively zero-padded on the left).
 */
#define SET_PKE_LN(what, how, idx) \
	rte_memcpy(cookie->input_array[idx] + how - \
		what.length, \
		what.data, \
		what.length)

/* Right-align a curve constant member `p` (p, a, b, n, x, y, h) into
 * input_array[idx]; requires qat_func_alignsize in the caller's scope.
 */
#define SET_PKE_LN_EC(curve, p, idx) \
	rte_memcpy(cookie->input_array[idx] + \
		qat_func_alignsize - curve.bytesize, \
		curve.p.data, curve.bytesize)

/* Right-align `what` into the idx-th qat_func_alignsize slot of the flat
 * cookie->input_buffer (flat layout used by the 9A ECDSA services).
 */
#define SET_PKE_9A_IN(what, idx) \
	rte_memcpy(&cookie->input_buffer[idx * \
		qat_func_alignsize] + \
		qat_func_alignsize - what.length, \
		what.data, what.length)

/* As SET_PKE_9A_IN, for a curve constant of curve.bytesize bytes. */
#define SET_PKE_9A_EC(curve, p, idx) \
	rte_memcpy(&cookie->input_buffer[idx * \
		qat_func_alignsize] + \
		qat_func_alignsize - curve.bytesize, \
		curve.p.data, curve.bytesize)

/* Zeroize then free a sensitive parameter buffer.
 * NOTE(review): a plain memset immediately before free may be elided by
 * the optimizer; consider an explicit-memset helper - confirm availability.
 */
#define PARAM_CLR(what) \
	do { \
		memset(what.data, 0, what.length); \
		rte_free(what.data);	\
	} while (0)
109 
110 static void
111 request_init(struct icp_qat_fw_pke_request *qat_req)
112 {
113 	memset(qat_req, 0, sizeof(*qat_req));
114 	qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_PKE;
115 	qat_req->pke_hdr.hdr_flags =
116 		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD
117 		(ICP_QAT_FW_COMN_REQ_FLAG_SET);
118 }
119 
120 static void
121 cleanup_arrays(struct qat_asym_op_cookie *cookie,
122 		int in_count, int out_count, int alg_size)
123 {
124 	int i;
125 
126 	for (i = 0; i < in_count; i++)
127 		memset(cookie->input_array[i], 0x0, alg_size);
128 	for (i = 0; i < out_count; i++)
129 		memset(cookie->output_array[i], 0x0, alg_size);
130 }
131 
132 static void
133 cleanup_crt(struct qat_asym_op_cookie *cookie,
134 		int alg_size)
135 {
136 	int i;
137 
138 	memset(cookie->input_array[0], 0x0, alg_size);
139 	for (i = 1; i < QAT_ASYM_RSA_QT_NUM_IN_PARAMS; i++)
140 		memset(cookie->input_array[i], 0x0, alg_size / 2);
141 	for (i = 0; i < QAT_ASYM_RSA_NUM_OUT_PARAMS; i++)
142 		memset(cookie->output_array[i], 0x0, alg_size);
143 }
144 
145 static void
146 cleanup(struct qat_asym_op_cookie *cookie,
147 		const struct rte_crypto_asym_xform *xform)
148 {
149 	if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODEX)
150 		cleanup_arrays(cookie, QAT_ASYM_MODEXP_NUM_IN_PARAMS,
151 				QAT_ASYM_MODEXP_NUM_OUT_PARAMS,
152 				cookie->alg_bytesize);
153 	else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_MODINV)
154 		cleanup_arrays(cookie, QAT_ASYM_MODINV_NUM_IN_PARAMS,
155 				QAT_ASYM_MODINV_NUM_OUT_PARAMS,
156 				cookie->alg_bytesize);
157 	else if (xform->xform_type == RTE_CRYPTO_ASYM_XFORM_RSA) {
158 		if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT)
159 			cleanup_crt(cookie, cookie->alg_bytesize);
160 		else {
161 			cleanup_arrays(cookie, QAT_ASYM_RSA_NUM_IN_PARAMS,
162 				QAT_ASYM_RSA_NUM_OUT_PARAMS,
163 				cookie->alg_bytesize);
164 		}
165 	} else {
166 		cleanup_arrays(cookie, QAT_ASYM_MAX_PARAMS,
167 				QAT_ASYM_MAX_PARAMS,
168 				QAT_PKE_MAX_LN_SIZE);
169 	}
170 }
171 
172 static int
173 check_zero(rte_crypto_param n)
174 {
175 	int i, len = n.length;
176 
177 	if (len < 8) {
178 		for (i = len - 1; i >= 0; i--) {
179 			if (n.data[i] != 0x0)
180 				return 0;
181 		}
182 	} else if (len == 8 && *(uint64_t *)&n.data[len - 8] == 0) {
183 		return 1;
184 	} else if (*(uint64_t *)&n.data[len - 8] == 0) {
185 		for (i = len - 9; i >= 0; i--) {
186 			if (n.data[i] != 0x0)
187 				return 0;
188 		}
189 	} else
190 		return 0;
191 
192 	return 1;
193 }
194 
195 static struct qat_asym_function
196 get_asym_function(const struct rte_crypto_asym_xform *xform)
197 {
198 	struct qat_asym_function qat_function;
199 
200 	switch (xform->xform_type) {
201 	case RTE_CRYPTO_ASYM_XFORM_MODEX:
202 		qat_function = get_modexp_function(xform);
203 		break;
204 	case RTE_CRYPTO_ASYM_XFORM_MODINV:
205 		qat_function = get_modinv_function(xform);
206 		break;
207 	default:
208 		qat_function.func_id = 0;
209 		break;
210 	}
211 
212 	return qat_function;
213 }
214 
/*
 * Fill the PKE request and cookie for a modular exponentiation
 * (result = base ^ exponent mod modulus).
 * Returns 0 on success, -EINVAL on empty parameters or when no firmware
 * function covers the operand size.
 */
static int
modexp_set_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	struct qat_asym_function qat_function;
	uint32_t alg_bytesize, func_id, in_bytesize;
	int status = 0;

	CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod exp",
			"modulus", status);
	CHECK_IF_NOT_EMPTY(xform->modex.exponent, "mod exp",
				"exponent", status);
	if (status)
		return status;

	/* Size the firmware function by the largest of the three operands. */
	if (asym_op->modex.base.length > xform->modex.exponent.length &&
		asym_op->modex.base.length > xform->modex.modulus.length) {
		in_bytesize = asym_op->modex.base.length;
	} else if (xform->modex.exponent.length > xform->modex.modulus.length)
		in_bytesize = xform->modex.exponent.length;
	else
		in_bytesize = xform->modex.modulus.length;

	qat_function = get_modexp_function2(in_bytesize);
	func_id = qat_function.func_id;
	if (qat_function.func_id == 0) {
		QAT_LOG(ERR, "Cannot obtain functionality id");
		return -EINVAL;
	}
	alg_bytesize = qat_function.bytesize;

	/* Operands are right-aligned (left zero-padded) in the PKE lines. */
	SET_PKE_LN(asym_op->modex.base, alg_bytesize, 0);
	SET_PKE_LN(xform->modex.exponent, alg_bytesize, 1);
	SET_PKE_LN(xform->modex.modulus, alg_bytesize, 2);

	cookie->alg_bytesize = alg_bytesize;
	qat_req->pke_hdr.cd_pars.func_id = func_id;
	qat_req->input_param_count = QAT_ASYM_MODEXP_NUM_IN_PARAMS;
	qat_req->output_param_count = QAT_ASYM_MODEXP_NUM_OUT_PARAMS;

	HEXDUMP("ModExp base", cookie->input_array[0], alg_bytesize);
	HEXDUMP("ModExp exponent", cookie->input_array[1], alg_bytesize);
	HEXDUMP("ModExp modulus", cookie->input_array[2], alg_bytesize);

	return status;
}
263 
/*
 * Copy the mod-exp result from the device output line into the user
 * result buffer, skipping the left padding the firmware adds up to the
 * function's line size.
 */
static uint8_t
modexp_collect(struct rte_crypto_asym_op *asym_op,
		const struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_xform *xform)
{
	rte_crypto_param n = xform->modex.modulus;
	uint32_t alg_bytesize = cookie->alg_bytesize;
	uint8_t *modexp_result = asym_op->modex.result.data;

	if (n.length > alg_bytesize) {
		QAT_LOG(ERR, "Incorrect length of modexp modulus");
		return RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
	}
	/* Result is right-aligned within the alg_bytesize output line. */
	rte_memcpy(modexp_result,
		cookie->output_array[0] + alg_bytesize
		- n.length, n.length);
	/* NOTE(review): length reports the padded line size although only
	 * n.length bytes were copied - confirm callers expect this.
	 */
	asym_op->modex.result.length = alg_bytesize;
	HEXDUMP("ModExp result", cookie->output_array[0],
			alg_bytesize);
	return RTE_CRYPTO_OP_STATUS_SUCCESS;
}
285 
286 static int
287 modinv_set_input(struct icp_qat_fw_pke_request *qat_req,
288 		struct qat_asym_op_cookie *cookie,
289 		const struct rte_crypto_asym_op *asym_op,
290 		const struct rte_crypto_asym_xform *xform)
291 {
292 	struct qat_asym_function qat_function;
293 	uint32_t alg_bytesize, func_id;
294 	int status = 0;
295 
296 	CHECK_IF_NOT_EMPTY(xform->modex.modulus, "mod inv",
297 			"modulus", status);
298 	if (status)
299 		return status;
300 
301 	qat_function = get_asym_function(xform);
302 	func_id = qat_function.func_id;
303 	if (func_id == 0) {
304 		QAT_LOG(ERR, "Cannot obtain functionality id");
305 		return -EINVAL;
306 	}
307 	alg_bytesize = qat_function.bytesize;
308 
309 	SET_PKE_LN(asym_op->modinv.base, alg_bytesize, 0);
310 	SET_PKE_LN(xform->modinv.modulus, alg_bytesize, 1);
311 
312 	cookie->alg_bytesize = alg_bytesize;
313 	qat_req->pke_hdr.cd_pars.func_id = func_id;
314 	qat_req->input_param_count =
315 			QAT_ASYM_MODINV_NUM_IN_PARAMS;
316 	qat_req->output_param_count =
317 			QAT_ASYM_MODINV_NUM_OUT_PARAMS;
318 
319 	HEXDUMP("ModInv base", cookie->input_array[0], alg_bytesize);
320 	HEXDUMP("ModInv modulus", cookie->input_array[1], alg_bytesize);
321 
322 	return 0;
323 }
324 
/*
 * Copy the mod-inv result from the device output line into the user
 * result buffer, right-aligned to the modulus length.
 */
static uint8_t
modinv_collect(struct rte_crypto_asym_op *asym_op,
		const struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_xform *xform)
{
	rte_crypto_param n = xform->modinv.modulus;
	uint8_t *modinv_result = asym_op->modinv.result.data;
	uint32_t alg_bytesize = cookie->alg_bytesize;

	if (n.length > alg_bytesize) {
		QAT_LOG(ERR, "Incorrect length of modinv modulus");
		return RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
	}
	/* NOTE(review): destination offset uses the caller-supplied
	 * result.length, which is then overwritten with alg_bytesize
	 * below - confirm callers pre-set result.length >= n.length.
	 */
	rte_memcpy(modinv_result + (asym_op->modinv.result.length
		- n.length),
		cookie->output_array[0] + alg_bytesize
		- n.length, n.length);
	asym_op->modinv.result.length = alg_bytesize;
	HEXDUMP("ModInv result", cookie->output_array[0],
			alg_bytesize);
	return RTE_CRYPTO_OP_STATUS_SUCCESS;
}
347 
/*
 * Fill the PKE request for an RSA public-key operation (encrypt or
 * verify): line 0 carries the message/signature, line 1 the public
 * exponent e, line 2 the modulus n.  Only PADDING_NONE is accepted here.
 * Returns 0 on success, -EINVAL on unsupported padding or unknown
 * firmware function.
 */
static int
rsa_set_pub_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	struct qat_asym_function qat_function;
	uint32_t alg_bytesize, func_id;
	int status = 0;

	qat_function = get_rsa_enc_function(xform);
	func_id = qat_function.func_id;
	if (func_id == 0) {
		QAT_LOG(ERR, "Cannot obtain functionality id");
		return -EINVAL;
	}
	alg_bytesize = qat_function.bytesize;

	if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT) {
		switch (xform->rsa.padding.type) {
		case RTE_CRYPTO_RSA_PADDING_NONE:
			SET_PKE_LN(asym_op->rsa.message, alg_bytesize, 0);
			break;
		default:
			QAT_LOG(ERR,
				"Invalid RSA padding (Encryption)"
				);
			return -EINVAL;
		}
		HEXDUMP("RSA Message", cookie->input_array[0], alg_bytesize);
	} else {
		/* Verify path: the signature is the value to exponentiate. */
		switch (xform->rsa.padding.type) {
		case RTE_CRYPTO_RSA_PADDING_NONE:
			SET_PKE_LN(asym_op->rsa.sign, alg_bytesize, 0);
			break;
		default:
			QAT_LOG(ERR,
				"Invalid RSA padding (Verify)");
			return -EINVAL;
		}
		HEXDUMP("RSA Signature", cookie->input_array[0],
				alg_bytesize);
	}

	SET_PKE_LN(xform->rsa.e, alg_bytesize, 1);
	SET_PKE_LN(xform->rsa.n, alg_bytesize, 2);

	cookie->alg_bytesize = alg_bytesize;
	qat_req->pke_hdr.cd_pars.func_id = func_id;

	HEXDUMP("RSA Public Key", cookie->input_array[1], alg_bytesize);
	HEXDUMP("RSA Modulus", cookie->input_array[2], alg_bytesize);

	return status;
}
403 
/*
 * Fill the PKE request for an RSA private-key operation (decrypt or
 * sign).  QT (CRT) keys load the five half-size components p, q, dP,
 * dQ, qInv into lines 1-5; EXP keys load d and n into lines 1-2.
 * Line 0 carries the ciphertext (decrypt) or message (sign).  Only
 * PADDING_NONE is accepted.  Returns 0 on success, -EINVAL otherwise.
 */
static int
rsa_set_priv_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	struct qat_asym_function qat_function;
	uint32_t alg_bytesize, func_id;
	int status = 0;

	if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
		qat_function = get_rsa_crt_function(xform);
		func_id = qat_function.func_id;
		if (func_id == 0) {
			QAT_LOG(ERR, "Cannot obtain functionality id");
			return -EINVAL;
		}
		alg_bytesize = qat_function.bytesize;
		qat_req->input_param_count =
				QAT_ASYM_RSA_QT_NUM_IN_PARAMS;

		/* CRT components are half the modulus size each. */
		SET_PKE_LN(xform->rsa.qt.p, (alg_bytesize >> 1), 1);
		SET_PKE_LN(xform->rsa.qt.q, (alg_bytesize >> 1), 2);
		SET_PKE_LN(xform->rsa.qt.dP, (alg_bytesize >> 1), 3);
		SET_PKE_LN(xform->rsa.qt.dQ, (alg_bytesize >> 1), 4);
		SET_PKE_LN(xform->rsa.qt.qInv, (alg_bytesize >> 1), 5);

		/* NOTE(review): dumps show the full line although only the
		 * upper half of each CRT line holds data - debug only.
		 */
		HEXDUMP("RSA p", cookie->input_array[1],
				alg_bytesize);
		HEXDUMP("RSA q", cookie->input_array[2],
				alg_bytesize);
		HEXDUMP("RSA dP", cookie->input_array[3],
				alg_bytesize);
		HEXDUMP("RSA dQ", cookie->input_array[4],
				alg_bytesize);
		HEXDUMP("RSA qInv", cookie->input_array[5],
				alg_bytesize);
	} else if (xform->rsa.key_type ==
			RTE_RSA_KEY_TYPE_EXP) {
		qat_function = get_rsa_dec_function(xform);
		func_id = qat_function.func_id;
		if (func_id == 0) {
			QAT_LOG(ERR, "Cannot obtain functionality id");
			return -EINVAL;
		}
		alg_bytesize = qat_function.bytesize;

		SET_PKE_LN(xform->rsa.d, alg_bytesize, 1);
		SET_PKE_LN(xform->rsa.n, alg_bytesize, 2);

		HEXDUMP("RSA d", cookie->input_array[1],
				alg_bytesize);
		HEXDUMP("RSA n", cookie->input_array[2],
				alg_bytesize);
	} else {
		QAT_LOG(ERR, "Invalid RSA key type");
		return -EINVAL;
	}

	if (asym_op->rsa.op_type ==
			RTE_CRYPTO_ASYM_OP_DECRYPT) {
		switch (xform->rsa.padding.type) {
		case RTE_CRYPTO_RSA_PADDING_NONE:
			SET_PKE_LN(asym_op->rsa.cipher,	alg_bytesize, 0);
			HEXDUMP("RSA ciphertext", cookie->input_array[0],
				alg_bytesize);
			break;
		default:
			QAT_LOG(ERR,
				"Invalid padding of RSA (Decrypt)");
			return -(EINVAL);
		}

	} else if (asym_op->rsa.op_type ==
			RTE_CRYPTO_ASYM_OP_SIGN) {
		switch (xform->rsa.padding.type) {
		case RTE_CRYPTO_RSA_PADDING_NONE:
			SET_PKE_LN(asym_op->rsa.message, alg_bytesize, 0);
			HEXDUMP("RSA text to be signed", cookie->input_array[0],
				alg_bytesize);
			break;
		default:
			QAT_LOG(ERR,
				"Invalid padding of RSA (Signature)");
			return -(EINVAL);
		}
	}

	cookie->alg_bytesize = alg_bytesize;
	qat_req->pke_hdr.cd_pars.func_id = func_id;
	return status;
}
496 
497 static int
498 rsa_set_input(struct icp_qat_fw_pke_request *qat_req,
499 		struct qat_asym_op_cookie *cookie,
500 		const struct rte_crypto_asym_op *asym_op,
501 		const struct rte_crypto_asym_xform *xform)
502 {
503 	qat_req->input_param_count =
504 			QAT_ASYM_RSA_NUM_IN_PARAMS;
505 	qat_req->output_param_count =
506 			QAT_ASYM_RSA_NUM_OUT_PARAMS;
507 
508 	if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
509 			asym_op->rsa.op_type ==
510 				RTE_CRYPTO_ASYM_OP_VERIFY) {
511 		return rsa_set_pub_input(qat_req, cookie, asym_op, xform);
512 	} else {
513 		return rsa_set_priv_input(qat_req, cookie, asym_op, xform);
514 	}
515 }
516 
/*
 * Copy the RSA result out of the device output line into the field the
 * op type expects: cipher (encrypt/verify), message (decrypt) or sign
 * (sign).  Padding other than NONE is rejected where it is checked.
 */
static uint8_t
rsa_collect(struct rte_crypto_asym_op *asym_op,
		const struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_xform *xform)
{
	uint32_t alg_bytesize = cookie->alg_bytesize;

	if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_ENCRYPT ||
		asym_op->rsa.op_type ==	RTE_CRYPTO_ASYM_OP_VERIFY) {

		if (asym_op->rsa.op_type ==
				RTE_CRYPTO_ASYM_OP_ENCRYPT) {
			rte_memcpy(asym_op->rsa.cipher.data,
					cookie->output_array[0],
					alg_bytesize);
			asym_op->rsa.cipher.length = alg_bytesize;
			HEXDUMP("RSA Encrypted data", cookie->output_array[0],
				alg_bytesize);
		} else {
			/* Verify: recovered signature value is returned in
			 * the cipher field for the caller to compare.
			 */
			switch (xform->rsa.padding.type) {
			case RTE_CRYPTO_RSA_PADDING_NONE:
				rte_memcpy(asym_op->rsa.cipher.data,
						cookie->output_array[0],
						alg_bytesize);
				asym_op->rsa.cipher.length = alg_bytesize;
				HEXDUMP("RSA signature",
					cookie->output_array[0],
					alg_bytesize);
				break;
			default:
				QAT_LOG(ERR, "Padding not supported");
				return RTE_CRYPTO_OP_STATUS_ERROR;
			}
		}
	} else {
		if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_DECRYPT) {
			switch (xform->rsa.padding.type) {
			case RTE_CRYPTO_RSA_PADDING_NONE:
				rte_memcpy(asym_op->rsa.message.data,
					cookie->output_array[0],
					alg_bytesize);
				asym_op->rsa.message.length = alg_bytesize;
				HEXDUMP("RSA Decrypted Message",
					cookie->output_array[0],
					alg_bytesize);
				break;
			default:
				QAT_LOG(ERR, "Padding not supported");
				return RTE_CRYPTO_OP_STATUS_ERROR;
			}
		} else {
			/* Sign: raw private-key exponentiation result. */
			rte_memcpy(asym_op->rsa.sign.data,
				cookie->output_array[0],
				alg_bytesize);
			asym_op->rsa.sign.length = alg_bytesize;
			HEXDUMP("RSA Signature", cookie->output_array[0],
				alg_bytesize);
		}
	}
	return RTE_CRYPTO_OP_STATUS_SUCCESS;
}
578 
/*
 * Fill the PKE request for an ECDSA sign or verify.  Both services use
 * the flat cookie->input_buffer with one qat_func_alignsize slot per
 * parameter (SET_PKE_9A_* macros); curve constants come from the global
 * `curve` table selected by pick_curve().
 * Returns 0 on success, -EINVAL on bad curve/function, -1 on an
 * unsupported op type.
 */
static int
ecdsa_set_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	struct qat_asym_function qat_function;
	uint32_t qat_func_alignsize, func_id;
	int curve_id;

	curve_id = pick_curve(xform);
	if (curve_id < 0) {
		QAT_LOG(DEBUG, "Incorrect elliptic curve");
		return -EINVAL;
	}

	switch (asym_op->ecdsa.op_type) {
	case RTE_CRYPTO_ASYM_OP_SIGN:
		qat_function = get_ecdsa_function(xform);
		func_id = qat_function.func_id;
		if (func_id == 0) {
			QAT_LOG(ERR, "Cannot obtain functionality id");
			return -EINVAL;
		}
		/* Slots are padded up to an 8-byte multiple. */
		qat_func_alignsize =
			RTE_ALIGN_CEIL(qat_function.bytesize, 8);

		SET_PKE_9A_IN(xform->ec.pkey, 0);
		SET_PKE_9A_IN(asym_op->ecdsa.message, 1);
		SET_PKE_9A_IN(asym_op->ecdsa.k, 2);
		SET_PKE_9A_EC(curve[curve_id], b, 3);
		SET_PKE_9A_EC(curve[curve_id], a, 4);
		SET_PKE_9A_EC(curve[curve_id], p, 5);
		SET_PKE_9A_EC(curve[curve_id], n, 6);
		SET_PKE_9A_EC(curve[curve_id], y, 7);
		SET_PKE_9A_EC(curve[curve_id], x, 8);

		cookie->alg_bytesize = curve[curve_id].bytesize;
		cookie->qat_func_alignsize = qat_func_alignsize;
		qat_req->pke_hdr.cd_pars.func_id = func_id;
		qat_req->input_param_count =
				QAT_ASYM_ECDSA_RS_SIGN_IN_PARAMS;
		qat_req->output_param_count =
				QAT_ASYM_ECDSA_RS_SIGN_OUT_PARAMS;

		/* NOTE(review): dump labels/indices do not match the slots
		 * set above (slot 5 holds p, 8 holds x, slot 8 not dumped) -
		 * debug output only, but worth confirming intent.
		 */
		HEXDUMP_OFF_F("ECDSA d", 0);
		HEXDUMP_OFF_F("ECDSA e", 1);
		HEXDUMP_OFF_F("ECDSA k", 2);
		HEXDUMP_OFF_F("ECDSA b", 3);
		HEXDUMP_OFF_F("ECDSA a", 4);
		HEXDUMP_OFF_F("ECDSA n", 5);
		HEXDUMP_OFF_F("ECDSA y", 6);
		HEXDUMP_OFF_F("ECDSA x", 7);
		break;
	case RTE_CRYPTO_ASYM_OP_VERIFY:
		qat_function = get_ecdsa_verify_function(xform);
		func_id = qat_function.func_id;
		if (func_id == 0) {
			QAT_LOG(ERR, "Cannot obtain functionality id");
			return -EINVAL;
		}
		qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);

		SET_PKE_9A_IN(asym_op->ecdsa.message, 10);
		SET_PKE_9A_IN(asym_op->ecdsa.s, 9);
		SET_PKE_9A_IN(asym_op->ecdsa.r, 8);
		SET_PKE_9A_EC(curve[curve_id], n, 7);
		SET_PKE_9A_EC(curve[curve_id], x, 6);
		SET_PKE_9A_EC(curve[curve_id], y, 5);
		SET_PKE_9A_IN(xform->ec.q.x, 4);
		SET_PKE_9A_IN(xform->ec.q.y, 3);
		SET_PKE_9A_EC(curve[curve_id], a, 2);
		SET_PKE_9A_EC(curve[curve_id], b, 1);
		SET_PKE_9A_EC(curve[curve_id], p, 0);

		cookie->alg_bytesize = curve[curve_id].bytesize;
		cookie->qat_func_alignsize = qat_func_alignsize;
		qat_req->pke_hdr.cd_pars.func_id = func_id;
		qat_req->input_param_count =
				QAT_ASYM_ECDSA_RS_VERIFY_IN_PARAMS;
		qat_req->output_param_count =
				QAT_ASYM_ECDSA_RS_VERIFY_OUT_PARAMS;

		HEXDUMP_OFF_F("p", 0);
		HEXDUMP_OFF_F("b", 1);
		HEXDUMP_OFF_F("a", 2);
		HEXDUMP_OFF_F("y", 3);
		HEXDUMP_OFF_F("x", 4);
		HEXDUMP_OFF_F("yG", 5);
		HEXDUMP_OFF_F("xG", 6);
		HEXDUMP_OFF_F("n", 7);
		HEXDUMP_OFF_F("r", 8);
		HEXDUMP_OFF_F("s", 9);
		HEXDUMP_OFF_F("e", 10);
		break;
	default:
		return -1;
	}

	return 0;
}
680 
681 static uint8_t
682 ecdsa_collect(struct rte_crypto_asym_op *asym_op,
683 		const struct qat_asym_op_cookie *cookie)
684 {
685 	uint32_t alg_bytesize = cookie->alg_bytesize;
686 	uint32_t qat_func_alignsize = cookie->qat_func_alignsize;
687 	uint32_t ltrim = qat_func_alignsize - alg_bytesize;
688 
689 	if (asym_op->rsa.op_type == RTE_CRYPTO_ASYM_OP_SIGN) {
690 		uint8_t *r = asym_op->ecdsa.r.data;
691 		uint8_t *s = asym_op->ecdsa.s.data;
692 
693 		asym_op->ecdsa.r.length = alg_bytesize;
694 		asym_op->ecdsa.s.length = alg_bytesize;
695 		rte_memcpy(r, &cookie->output_array[0][ltrim], alg_bytesize);
696 		rte_memcpy(s, &cookie->output_array[1][ltrim], alg_bytesize);
697 
698 		HEXDUMP("R", cookie->output_array[0],
699 			qat_func_alignsize);
700 		HEXDUMP("S", cookie->output_array[1],
701 			qat_func_alignsize);
702 	}
703 	return RTE_CRYPTO_OP_STATUS_SUCCESS;
704 }
705 
/*
 * Fill the PKE request for an EC point multiplication: scalar k and the
 * point (x, y) in lines 0-2, curve constants a, b, p, h in lines 3-6.
 * Returns 0 on success, -EINVAL on bad curve or unknown function.
 */
static int
ecpm_set_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	struct qat_asym_function qat_function;
	uint32_t qat_func_alignsize, func_id;
	int curve_id;

	curve_id = pick_curve(xform);
	if (curve_id < 0) {
		QAT_LOG(DEBUG, "Incorrect elliptic curve");
		return -EINVAL;
	}

	qat_function = get_ecpm_function(xform);
	func_id = qat_function.func_id;
	if (func_id == 0) {
		QAT_LOG(ERR, "Cannot obtain functionality id");
		return -EINVAL;
	}
	/* Lines are padded up to an 8-byte multiple. */
	qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);

	SET_PKE_LN(asym_op->ecpm.scalar, qat_func_alignsize, 0);
	SET_PKE_LN(asym_op->ecpm.p.x, qat_func_alignsize, 1);
	SET_PKE_LN(asym_op->ecpm.p.y, qat_func_alignsize, 2);
	SET_PKE_LN_EC(curve[curve_id], a, 3);
	SET_PKE_LN_EC(curve[curve_id], b, 4);
	SET_PKE_LN_EC(curve[curve_id], p, 5);
	SET_PKE_LN_EC(curve[curve_id], h, 6);

	cookie->alg_bytesize = curve[curve_id].bytesize;
	cookie->qat_func_alignsize = qat_func_alignsize;
	qat_req->pke_hdr.cd_pars.func_id = func_id;
	qat_req->input_param_count =
			QAT_ASYM_ECPM_IN_PARAMS;
	qat_req->output_param_count =
			QAT_ASYM_ECPM_OUT_PARAMS;

	HEXDUMP("k", cookie->input_array[0], qat_func_alignsize);
	HEXDUMP("xG", cookie->input_array[1], qat_func_alignsize);
	HEXDUMP("yG", cookie->input_array[2], qat_func_alignsize);
	HEXDUMP("a", cookie->input_array[3], qat_func_alignsize);
	HEXDUMP("b", cookie->input_array[4], qat_func_alignsize);
	HEXDUMP("q", cookie->input_array[5], qat_func_alignsize);
	HEXDUMP("h", cookie->input_array[6], qat_func_alignsize);

	return 0;
}
756 
757 static uint8_t
758 ecpm_collect(struct rte_crypto_asym_op *asym_op,
759 		const struct qat_asym_op_cookie *cookie)
760 {
761 	uint8_t *x = asym_op->ecpm.r.x.data;
762 	uint8_t *y = asym_op->ecpm.r.y.data;
763 	uint32_t alg_bytesize = cookie->alg_bytesize;
764 	uint32_t qat_func_alignsize = cookie->qat_func_alignsize;
765 	uint32_t ltrim = qat_func_alignsize - alg_bytesize;
766 
767 	asym_op->ecpm.r.x.length = alg_bytesize;
768 	asym_op->ecpm.r.y.length = alg_bytesize;
769 	rte_memcpy(x, &cookie->output_array[0][ltrim], alg_bytesize);
770 	rte_memcpy(y, &cookie->output_array[1][ltrim], alg_bytesize);
771 
772 	HEXDUMP("rX", cookie->output_array[0],
773 		qat_func_alignsize);
774 	HEXDUMP("rY", cookie->output_array[1],
775 		qat_func_alignsize);
776 	return RTE_CRYPTO_OP_STATUS_SUCCESS;
777 }
778 
/*
 * Fill the PKE request for ECDH key generation or shared-secret
 * computation.  Both are EC point multiplications: public-key
 * generation multiplies the curve base point by the private key;
 * shared-secret computation multiplies the peer's public key instead.
 * Returns 0 on success, -EINVAL on bad curve or unknown function.
 */
static int
ecdh_set_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	struct qat_asym_function qat_function;
	uint32_t qat_func_alignsize, func_id;
	int curve_id;

	curve_id = pick_curve(xform);
	if (curve_id < 0) {
		QAT_LOG(DEBUG, "Incorrect elliptic curve");
		return -EINVAL;
	}

	qat_function = get_ecpm_function(xform);
	func_id = qat_function.func_id;
	if (func_id == 0) {
		QAT_LOG(ERR, "Cannot obtain functionality id");
		return -EINVAL;
	}
	qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);

	if (asym_op->ecdh.ke_type == RTE_CRYPTO_ASYM_KE_PUB_KEY_GENERATE) {
		/* Multiply the curve generator point by the private key. */
		SET_PKE_LN(asym_op->ecdh.priv_key, qat_func_alignsize, 0);
		SET_PKE_LN_EC(curve[curve_id], x, 1);
		SET_PKE_LN_EC(curve[curve_id], y, 2);
	} else {
		/* Multiply the peer public key by the private key. */
		SET_PKE_LN(asym_op->ecdh.priv_key, qat_func_alignsize, 0);
		SET_PKE_LN(asym_op->ecdh.pub_key.x, qat_func_alignsize, 1);
		SET_PKE_LN(asym_op->ecdh.pub_key.y, qat_func_alignsize, 2);
	}
	SET_PKE_LN_EC(curve[curve_id], a, 3);
	SET_PKE_LN_EC(curve[curve_id], b, 4);
	SET_PKE_LN_EC(curve[curve_id], p, 5);
	SET_PKE_LN_EC(curve[curve_id], h, 6);

	cookie->alg_bytesize = curve[curve_id].bytesize;
	cookie->qat_func_alignsize = qat_func_alignsize;
	qat_req->pke_hdr.cd_pars.func_id = func_id;
	qat_req->input_param_count =
			QAT_ASYM_ECPM_IN_PARAMS;
	qat_req->output_param_count =
			QAT_ASYM_ECPM_OUT_PARAMS;

	HEXDUMP("k", cookie->input_array[0], qat_func_alignsize);
	HEXDUMP("xG", cookie->input_array[1], qat_func_alignsize);
	HEXDUMP("yG", cookie->input_array[2], qat_func_alignsize);
	HEXDUMP("a", cookie->input_array[3], qat_func_alignsize);
	HEXDUMP("b", cookie->input_array[4], qat_func_alignsize);
	HEXDUMP("q", cookie->input_array[5], qat_func_alignsize);
	HEXDUMP("h", cookie->input_array[6], qat_func_alignsize);

	return 0;
}
835 
/*
 * Fill the PKE request for an EC public-key (point) verification:
 * the candidate point (x, y) plus curve constants p, a, b.  The service
 * produces no output data - success/failure comes from the response
 * status.  Returns 0 on success, -EINVAL on bad curve/function.
 */
static int
ecdh_verify_set_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	struct qat_asym_function qat_function;
	uint32_t qat_func_alignsize, func_id;
	int curve_id;

	curve_id = pick_curve(xform);
	if (curve_id < 0) {
		QAT_LOG(DEBUG, "Incorrect elliptic curve");
		return -EINVAL;
	}

	qat_function = get_ec_verify_function(xform);
	func_id = qat_function.func_id;
	if (func_id == 0) {
		QAT_LOG(ERR, "Cannot obtain functionality id");
		return -EINVAL;
	}
	qat_func_alignsize = RTE_ALIGN_CEIL(qat_function.bytesize, 8);

	SET_PKE_LN(asym_op->ecdh.pub_key.x, qat_func_alignsize, 0);
	SET_PKE_LN(asym_op->ecdh.pub_key.y, qat_func_alignsize, 1);
	SET_PKE_LN_EC(curve[curve_id], p, 2);
	SET_PKE_LN_EC(curve[curve_id], a, 3);
	SET_PKE_LN_EC(curve[curve_id], b, 4);

	cookie->alg_bytesize = curve[curve_id].bytesize;
	cookie->qat_func_alignsize = qat_func_alignsize;
	qat_req->pke_hdr.cd_pars.func_id = func_id;
	/* Point-verify takes 5 inputs (x, y, p, a, b) and returns none. */
	qat_req->input_param_count =
			5;
	qat_req->output_param_count =
			0;

	HEXDUMP("x", cookie->input_array[0], qat_func_alignsize);
	HEXDUMP("y", cookie->input_array[1], qat_func_alignsize);
	HEXDUMP("p", cookie->input_array[2], qat_func_alignsize);
	HEXDUMP("a", cookie->input_array[3], qat_func_alignsize);
	HEXDUMP("b", cookie->input_array[4], qat_func_alignsize);

	return 0;
}
882 
883 static uint8_t
884 ecdh_collect(struct rte_crypto_asym_op *asym_op,
885 		const struct qat_asym_op_cookie *cookie)
886 {
887 	uint8_t *x, *y;
888 	uint32_t alg_bytesize = cookie->alg_bytesize;
889 	uint32_t qat_func_alignsize = cookie->qat_func_alignsize;
890 	uint32_t ltrim = qat_func_alignsize - alg_bytesize;
891 
892 	if (asym_op->ecdh.ke_type == RTE_CRYPTO_ASYM_KE_PUB_KEY_VERIFY)
893 		return RTE_CRYPTO_OP_STATUS_SUCCESS;
894 
895 	if (asym_op->ecdh.ke_type == RTE_CRYPTO_ASYM_KE_PUB_KEY_GENERATE) {
896 		asym_op->ecdh.pub_key.x.length = alg_bytesize;
897 		asym_op->ecdh.pub_key.y.length = alg_bytesize;
898 		x = asym_op->ecdh.pub_key.x.data;
899 		y = asym_op->ecdh.pub_key.y.data;
900 	} else {
901 		asym_op->ecdh.shared_secret.x.length = alg_bytesize;
902 		asym_op->ecdh.shared_secret.y.length = alg_bytesize;
903 		x = asym_op->ecdh.shared_secret.x.data;
904 		y = asym_op->ecdh.shared_secret.y.data;
905 	}
906 
907 	rte_memcpy(x, &cookie->output_array[0][ltrim], alg_bytesize);
908 	rte_memcpy(y, &cookie->output_array[1][ltrim], alg_bytesize);
909 
910 	HEXDUMP("X", cookie->output_array[0],
911 		qat_func_alignsize);
912 	HEXDUMP("Y", cookie->output_array[1],
913 		qat_func_alignsize);
914 	return RTE_CRYPTO_OP_STATUS_SUCCESS;
915 }
916 
/*
 * Fill the PKE request for an SM2 ECDSA signature: nonce k, message
 * digest and private key in lines 0-2.  The SM2 curve is fixed, so no
 * curve parameters are loaded and no alignment padding is applied.
 * Returns RTE_CRYPTO_OP_STATUS_SUCCESS (0).
 */
static int
sm2_ecdsa_sign_set_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	const struct qat_asym_function qat_function =
		get_sm2_ecdsa_sign_function();
	const uint32_t qat_func_alignsize =
		qat_function.bytesize;

	SET_PKE_LN(asym_op->sm2.k, qat_func_alignsize, 0);
	SET_PKE_LN(asym_op->sm2.message, qat_func_alignsize, 1);
	SET_PKE_LN(xform->ec.pkey, qat_func_alignsize, 2);

	cookie->alg_bytesize = qat_function.bytesize;
	cookie->qat_func_alignsize = qat_function.bytesize;
	qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id;
	/* 3 inputs (k, e, d); 2 outputs (r, s). */
	qat_req->input_param_count = 3;
	qat_req->output_param_count = 2;

	return RTE_CRYPTO_OP_STATUS_SUCCESS;
}
940 
/*
 * Fill the PKE request for an SM2 ECDSA verification: message digest,
 * signature (r, s) and public key (x, y) in lines 0-4.  The service
 * produces no output data - pass/fail comes from the response status.
 * Returns RTE_CRYPTO_OP_STATUS_SUCCESS (0).
 */
static int
sm2_ecdsa_verify_set_input(struct icp_qat_fw_pke_request *qat_req,
		struct qat_asym_op_cookie *cookie,
		const struct rte_crypto_asym_op *asym_op,
		const struct rte_crypto_asym_xform *xform)
{
	const struct qat_asym_function qat_function =
		get_sm2_ecdsa_verify_function();
	const uint32_t qat_func_alignsize =
		qat_function.bytesize;

	SET_PKE_LN(asym_op->sm2.message, qat_func_alignsize, 0);
	SET_PKE_LN(asym_op->sm2.r, qat_func_alignsize, 1);
	SET_PKE_LN(asym_op->sm2.s, qat_func_alignsize, 2);
	SET_PKE_LN(xform->ec.q.x, qat_func_alignsize, 3);
	SET_PKE_LN(xform->ec.q.y, qat_func_alignsize, 4);

	cookie->alg_bytesize = qat_function.bytesize;
	cookie->qat_func_alignsize = qat_function.bytesize;
	qat_req->pke_hdr.cd_pars.func_id = qat_function.func_id;
	/* 5 inputs (e, r, s, xP, yP); verification returns no data. */
	qat_req->input_param_count = 5;
	qat_req->output_param_count = 0;

	return RTE_CRYPTO_OP_STATUS_SUCCESS;
}
966 
967 static uint8_t
968 sm2_ecdsa_sign_collect(struct rte_crypto_asym_op *asym_op,
969 		const struct qat_asym_op_cookie *cookie)
970 {
971 	uint32_t alg_bytesize = cookie->alg_bytesize;
972 
973 	if (asym_op->sm2.op_type == RTE_CRYPTO_ASYM_OP_VERIFY)
974 		return RTE_CRYPTO_OP_STATUS_SUCCESS;
975 
976 	rte_memcpy(asym_op->sm2.r.data, cookie->output_array[0], alg_bytesize);
977 	rte_memcpy(asym_op->sm2.s.data, cookie->output_array[1], alg_bytesize);
978 	asym_op->sm2.r.length = alg_bytesize;
979 	asym_op->sm2.s.length = alg_bytesize;
980 
981 	HEXDUMP("SM2 R", cookie->output_array[0],
982 		alg_bytesize);
983 	HEXDUMP("SM2 S", cookie->output_array[1],
984 		alg_bytesize);
985 	return RTE_CRYPTO_OP_STATUS_SUCCESS;
986 }
987 
988 static int
989 asym_set_input(struct icp_qat_fw_pke_request *qat_req,
990 		struct qat_asym_op_cookie *cookie,
991 		const struct rte_crypto_asym_op *asym_op,
992 		const struct rte_crypto_asym_xform *xform,
993 		uint8_t legacy_alg)
994 {
995 	switch (xform->xform_type) {
996 	case RTE_CRYPTO_ASYM_XFORM_MODEX:
997 		return modexp_set_input(qat_req, cookie, asym_op, xform);
998 	case RTE_CRYPTO_ASYM_XFORM_MODINV:
999 		return modinv_set_input(qat_req, cookie, asym_op, xform);
1000 	case RTE_CRYPTO_ASYM_XFORM_RSA:{
1001 		if (unlikely((xform->rsa.n.length < RSA_MODULUS_2048_BITS)
1002 					&& (legacy_alg == 0)))
1003 			return RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
1004 		return rsa_set_input(qat_req, cookie, asym_op, xform);
1005 	}
1006 	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
1007 		return ecdsa_set_input(qat_req, cookie, asym_op, xform);
1008 	case RTE_CRYPTO_ASYM_XFORM_ECPM:
1009 		return ecpm_set_input(qat_req, cookie, asym_op, xform);
1010 	case RTE_CRYPTO_ASYM_XFORM_ECDH:
1011 		if (asym_op->ecdh.ke_type ==
1012 			RTE_CRYPTO_ASYM_KE_PUB_KEY_VERIFY) {
1013 			return ecdh_verify_set_input(qat_req, cookie,
1014 				asym_op, xform);
1015 		} else {
1016 			return ecdh_set_input(qat_req, cookie,
1017 				asym_op, xform);
1018 		}
1019 	case RTE_CRYPTO_ASYM_XFORM_SM2:
1020 		if (asym_op->sm2.op_type ==
1021 			RTE_CRYPTO_ASYM_OP_VERIFY) {
1022 			return sm2_ecdsa_verify_set_input(qat_req, cookie,
1023 						asym_op, xform);
1024 		} else {
1025 			return sm2_ecdsa_sign_set_input(qat_req, cookie,
1026 					asym_op, xform);
1027 		}
1028 	default:
1029 		QAT_LOG(ERR, "Invalid/unsupported asymmetric crypto xform");
1030 		return -EINVAL;
1031 	}
1032 	return 1;
1033 }
1034 
1035 static int
1036 qat_asym_build_request(void *in_op,
1037 	uint8_t *out_msg,
1038 	void *op_cookie,
1039 	struct qat_qp *qp)
1040 {
1041 	struct rte_crypto_op *op = (struct rte_crypto_op *)in_op;
1042 	struct rte_crypto_asym_op *asym_op = op->asym;
1043 	struct icp_qat_fw_pke_request *qat_req =
1044 			(struct icp_qat_fw_pke_request *)out_msg;
1045 	struct qat_asym_op_cookie *cookie =
1046 			(struct qat_asym_op_cookie *)op_cookie;
1047 	struct rte_crypto_asym_xform *xform;
1048 	struct qat_asym_session *qat_session = (struct qat_asym_session *)
1049 			op->asym->session->sess_private_data;
1050 	int err = 0;
1051 
1052 	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
1053 	switch (op->sess_type) {
1054 	case RTE_CRYPTO_OP_WITH_SESSION:
1055 		request_init(qat_req);
1056 		if (unlikely(qat_session == NULL)) {
1057 			QAT_DP_LOG(ERR,
1058 				"Session was not created for this device");
1059 			op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
1060 			goto error;
1061 		}
1062 		xform = &qat_session->xform;
1063 		break;
1064 	case RTE_CRYPTO_OP_SESSIONLESS:
1065 		request_init(qat_req);
1066 		xform = op->asym->xform;
1067 		break;
1068 	default:
1069 		QAT_DP_LOG(ERR, "Invalid session/xform settings");
1070 		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
1071 		goto error;
1072 	}
1073 	err = asym_set_input(qat_req, cookie, asym_op, xform,
1074 		qp->qat_dev->options.legacy_alg);
1075 	if (err) {
1076 		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
1077 		goto error;
1078 	}
1079 
1080 	qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
1081 	qat_req->pke_mid.src_data_addr = cookie->input_addr;
1082 	qat_req->pke_mid.dest_data_addr = cookie->output_addr;
1083 
1084 	HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
1085 
1086 	return 0;
1087 error:
1088 	qat_req->pke_mid.opaque = (uint64_t)(uintptr_t)op;
1089 	HEXDUMP("qat_req:", qat_req, sizeof(struct icp_qat_fw_pke_request));
1090 	qat_req->output_param_count = 0;
1091 	qat_req->input_param_count = 0;
1092 	qat_req->pke_hdr.service_type = ICP_QAT_FW_COMN_REQ_NULL;
1093 	cookie->error |= err;
1094 
1095 	return 0;
1096 }
1097 
1098 static uint8_t
1099 qat_asym_collect_response(struct rte_crypto_op *op,
1100 		struct qat_asym_op_cookie *cookie,
1101 		struct rte_crypto_asym_xform *xform)
1102 {
1103 	struct rte_crypto_asym_op *asym_op = op->asym;
1104 
1105 	switch (xform->xform_type) {
1106 	case RTE_CRYPTO_ASYM_XFORM_MODEX:
1107 		return modexp_collect(asym_op, cookie, xform);
1108 	case RTE_CRYPTO_ASYM_XFORM_MODINV:
1109 		return modinv_collect(asym_op, cookie, xform);
1110 	case RTE_CRYPTO_ASYM_XFORM_RSA:
1111 		return rsa_collect(asym_op, cookie, xform);
1112 	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
1113 		return ecdsa_collect(asym_op, cookie);
1114 	case RTE_CRYPTO_ASYM_XFORM_ECPM:
1115 		return ecpm_collect(asym_op, cookie);
1116 	case RTE_CRYPTO_ASYM_XFORM_ECDH:
1117 		return ecdh_collect(asym_op, cookie);
1118 	case RTE_CRYPTO_ASYM_XFORM_SM2:
1119 		return sm2_ecdsa_sign_collect(asym_op, cookie);
1120 	default:
1121 		QAT_LOG(ERR, "Not supported xform type");
1122 		return  RTE_CRYPTO_OP_STATUS_ERROR;
1123 	}
1124 }
1125 
1126 static int
1127 qat_asym_process_response(void **out_op, uint8_t *resp,
1128 		void *op_cookie, __rte_unused uint64_t *dequeue_err_count)
1129 {
1130 	struct icp_qat_fw_pke_resp *resp_msg =
1131 			(struct icp_qat_fw_pke_resp *)resp;
1132 	struct rte_crypto_op *op = (struct rte_crypto_op *)(uintptr_t)
1133 			(resp_msg->opaque);
1134 	struct qat_asym_op_cookie *cookie = op_cookie;
1135 	struct rte_crypto_asym_xform *xform = NULL;
1136 	struct qat_asym_session *qat_session = (struct qat_asym_session *)
1137 			op->asym->session->sess_private_data;
1138 
1139 	*out_op = op;
1140 	if (cookie->error) {
1141 		cookie->error = 0;
1142 		if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
1143 			op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1144 		QAT_DP_LOG(DEBUG, "Cookie status returned error");
1145 	} else {
1146 		if (ICP_QAT_FW_PKE_RESP_PKE_STAT_GET(
1147 			resp_msg->pke_resp_hdr.resp_status.pke_resp_flags)) {
1148 			if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
1149 				op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1150 			QAT_DP_LOG(DEBUG, "Asymmetric response status"
1151 					" returned error");
1152 		}
1153 		if (resp_msg->pke_resp_hdr.resp_status.comn_err_code) {
1154 			if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
1155 				op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1156 			QAT_DP_LOG(ERR, "Asymmetric common status"
1157 					" returned error");
1158 		}
1159 	}
1160 
1161 	switch (op->sess_type) {
1162 	case RTE_CRYPTO_OP_WITH_SESSION:
1163 		xform = &qat_session->xform;
1164 		break;
1165 	case RTE_CRYPTO_OP_SESSIONLESS:
1166 		xform = op->asym->xform;
1167 		break;
1168 	default:
1169 		QAT_DP_LOG(ERR,
1170 			"Invalid session/xform settings in response ring!");
1171 		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
1172 	}
1173 	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
1174 		op->status = qat_asym_collect_response(op, cookie, xform);
1175 	HEXDUMP("resp_msg:", resp_msg, sizeof(struct icp_qat_fw_pke_resp));
1176 	if (likely(xform != NULL))
1177 		cleanup(cookie, xform);
1178 
1179 	return 1;
1180 }
1181 
1182 static int
1183 session_set_modexp(struct qat_asym_session *qat_session,
1184 			struct rte_crypto_asym_xform *xform)
1185 {
1186 	uint8_t *modulus = xform->modex.modulus.data;
1187 	uint8_t *exponent = xform->modex.exponent.data;
1188 
1189 	qat_session->xform.modex.modulus.data =
1190 		rte_malloc(NULL, xform->modex.modulus.length, 0);
1191 	if (qat_session->xform.modex.modulus.data == NULL)
1192 		return -ENOMEM;
1193 	qat_session->xform.modex.modulus.length = xform->modex.modulus.length;
1194 	qat_session->xform.modex.exponent.data = rte_malloc(NULL,
1195 				xform->modex.exponent.length, 0);
1196 	if (qat_session->xform.modex.exponent.data == NULL) {
1197 		rte_free(qat_session->xform.modex.exponent.data);
1198 		return -ENOMEM;
1199 	}
1200 	qat_session->xform.modex.exponent.length = xform->modex.exponent.length;
1201 
1202 	rte_memcpy(qat_session->xform.modex.modulus.data, modulus,
1203 			xform->modex.modulus.length);
1204 	rte_memcpy(qat_session->xform.modex.exponent.data, exponent,
1205 			xform->modex.exponent.length);
1206 
1207 	return 0;
1208 }
1209 
1210 static int
1211 session_set_modinv(struct qat_asym_session *qat_session,
1212 			struct rte_crypto_asym_xform *xform)
1213 {
1214 	uint8_t *modulus = xform->modinv.modulus.data;
1215 
1216 	qat_session->xform.modinv.modulus.data =
1217 		rte_malloc(NULL, xform->modinv.modulus.length, 0);
1218 	if (qat_session->xform.modinv.modulus.data == NULL)
1219 		return -ENOMEM;
1220 	qat_session->xform.modinv.modulus.length = xform->modinv.modulus.length;
1221 
1222 	rte_memcpy(qat_session->xform.modinv.modulus.data, modulus,
1223 			xform->modinv.modulus.length);
1224 
1225 	return 0;
1226 }
1227 
/*
 * Deep-copy the RSA key material from the user's xform into
 * session-owned buffers (public n and e always; either the CRT
 * quintuple p/q/dP/dQ/qInv or the private exponent d, depending on
 * key_type).
 *
 * Returns 0 on success, -ENOMEM on allocation failure. The caller
 * zeroed the session beforehand, so on the err path rte_free() of any
 * field that was never allocated is a harmless free(NULL).
 */
static int
session_set_rsa(struct qat_asym_session *qat_session,
			struct rte_crypto_asym_xform *xform)
{
	uint8_t *n = xform->rsa.n.data;
	uint8_t *e = xform->rsa.e.data;
	int ret = 0;

	qat_session->xform.rsa.key_type = xform->rsa.key_type;

	/* Public modulus n. */
	qat_session->xform.rsa.n.data =
		rte_malloc(NULL, xform->rsa.n.length, 0);
	if (qat_session->xform.rsa.n.data == NULL)
		return -ENOMEM;
	qat_session->xform.rsa.n.length =
		xform->rsa.n.length;

	/* Public exponent e. */
	qat_session->xform.rsa.e.data =
		rte_malloc(NULL, xform->rsa.e.length, 0);
	if (qat_session->xform.rsa.e.data == NULL) {
		ret = -ENOMEM;
		goto err;
	}
	qat_session->xform.rsa.e.length =
		xform->rsa.e.length;

	if (xform->rsa.key_type == RTE_RSA_KEY_TYPE_QT) {
		/* CRT form: copy the quintuple p, q, dP, dQ, qInv. */
		uint8_t *p = xform->rsa.qt.p.data;
		uint8_t *q = xform->rsa.qt.q.data;
		uint8_t *dP = xform->rsa.qt.dP.data;
		uint8_t *dQ = xform->rsa.qt.dQ.data;
		uint8_t *qInv = xform->rsa.qt.qInv.data;

		qat_session->xform.rsa.qt.p.data =
			rte_malloc(NULL, xform->rsa.qt.p.length, 0);
		if (qat_session->xform.rsa.qt.p.data == NULL) {
			ret = -ENOMEM;
			goto err;
		}
		qat_session->xform.rsa.qt.p.length =
			xform->rsa.qt.p.length;

		qat_session->xform.rsa.qt.q.data =
			rte_malloc(NULL, xform->rsa.qt.q.length, 0);
		if (qat_session->xform.rsa.qt.q.data == NULL) {
			ret = -ENOMEM;
			goto err;
		}
		qat_session->xform.rsa.qt.q.length =
			xform->rsa.qt.q.length;

		qat_session->xform.rsa.qt.dP.data =
			rte_malloc(NULL, xform->rsa.qt.dP.length, 0);
		if (qat_session->xform.rsa.qt.dP.data == NULL) {
			ret = -ENOMEM;
			goto err;
		}
		qat_session->xform.rsa.qt.dP.length =
			xform->rsa.qt.dP.length;

		qat_session->xform.rsa.qt.dQ.data =
			rte_malloc(NULL, xform->rsa.qt.dQ.length, 0);
		if (qat_session->xform.rsa.qt.dQ.data == NULL) {
			ret = -ENOMEM;
			goto err;
		}
		qat_session->xform.rsa.qt.dQ.length =
			xform->rsa.qt.dQ.length;

		qat_session->xform.rsa.qt.qInv.data =
			rte_malloc(NULL, xform->rsa.qt.qInv.length, 0);
		if (qat_session->xform.rsa.qt.qInv.data == NULL) {
			ret = -ENOMEM;
			goto err;
		}
		qat_session->xform.rsa.qt.qInv.length =
			xform->rsa.qt.qInv.length;

		/* All buffers allocated; now copy the key material. */
		rte_memcpy(qat_session->xform.rsa.qt.p.data, p,
				xform->rsa.qt.p.length);
		rte_memcpy(qat_session->xform.rsa.qt.q.data, q,
				xform->rsa.qt.q.length);
		rte_memcpy(qat_session->xform.rsa.qt.dP.data, dP,
				xform->rsa.qt.dP.length);
		rte_memcpy(qat_session->xform.rsa.qt.dQ.data, dQ,
				xform->rsa.qt.dQ.length);
		rte_memcpy(qat_session->xform.rsa.qt.qInv.data, qInv,
				xform->rsa.qt.qInv.length);

	} else {
		/* Plain form: copy the private exponent d. */
		uint8_t *d = xform->rsa.d.data;

		qat_session->xform.rsa.d.data =
			rte_malloc(NULL, xform->rsa.d.length, 0);
		if (qat_session->xform.rsa.d.data == NULL) {
			ret = -ENOMEM;
			goto err;
		}
		qat_session->xform.rsa.d.length =
			xform->rsa.d.length;
		rte_memcpy(qat_session->xform.rsa.d.data, d,
			xform->rsa.d.length);
	}

	rte_memcpy(qat_session->xform.rsa.n.data, n,
		xform->rsa.n.length);
	rte_memcpy(qat_session->xform.rsa.e.data, e,
		xform->rsa.e.length);

	return 0;

err:
	/* NOTE(review): freed pointers are left dangling here; safe only
	 * because the caller discards the session on error — confirm no
	 * other path reuses it.
	 */
	rte_free(qat_session->xform.rsa.n.data);
	rte_free(qat_session->xform.rsa.e.data);
	rte_free(qat_session->xform.rsa.d.data);
	rte_free(qat_session->xform.rsa.qt.p.data);
	rte_free(qat_session->xform.rsa.qt.q.data);
	rte_free(qat_session->xform.rsa.qt.dP.data);
	rte_free(qat_session->xform.rsa.qt.dQ.data);
	rte_free(qat_session->xform.rsa.qt.qInv.data);
	return ret;
}
1350 
1351 static int
1352 session_set_ec(struct qat_asym_session *qat_session,
1353 			struct rte_crypto_asym_xform *xform)
1354 {
1355 	uint8_t *pkey = xform->ec.pkey.data;
1356 	uint8_t *q_x = xform->ec.q.x.data;
1357 	uint8_t *q_y = xform->ec.q.y.data;
1358 
1359 	qat_session->xform.ec.pkey.data =
1360 		rte_malloc(NULL, xform->ec.pkey.length, 0);
1361 	if (qat_session->xform.ec.pkey.length &&
1362 		qat_session->xform.ec.pkey.data == NULL)
1363 		return -ENOMEM;
1364 	qat_session->xform.ec.q.x.data = rte_malloc(NULL,
1365 		xform->ec.q.x.length, 0);
1366 	if (qat_session->xform.ec.q.x.length &&
1367 		qat_session->xform.ec.q.x.data == NULL) {
1368 		rte_free(qat_session->xform.ec.pkey.data);
1369 		return -ENOMEM;
1370 	}
1371 	qat_session->xform.ec.q.y.data = rte_malloc(NULL,
1372 		xform->ec.q.y.length, 0);
1373 	if (qat_session->xform.ec.q.y.length &&
1374 		qat_session->xform.ec.q.y.data == NULL) {
1375 		rte_free(qat_session->xform.ec.pkey.data);
1376 		rte_free(qat_session->xform.ec.q.x.data);
1377 		return -ENOMEM;
1378 	}
1379 
1380 	memcpy(qat_session->xform.ec.pkey.data, pkey,
1381 		xform->ec.pkey.length);
1382 	qat_session->xform.ec.pkey.length = xform->ec.pkey.length;
1383 	memcpy(qat_session->xform.ec.q.x.data, q_x,
1384 		xform->ec.q.x.length);
1385 	qat_session->xform.ec.q.x.length = xform->ec.q.x.length;
1386 	memcpy(qat_session->xform.ec.q.y.data, q_y,
1387 		xform->ec.q.y.length);
1388 	qat_session->xform.ec.q.y.length = xform->ec.q.y.length;
1389 	qat_session->xform.ec.curve_id = xform->ec.curve_id;
1390 
1391 	return 0;
1392 
1393 }
1394 
1395 int
1396 qat_asym_session_configure(struct rte_cryptodev *dev __rte_unused,
1397 		struct rte_crypto_asym_xform *xform,
1398 		struct rte_cryptodev_asym_session *session)
1399 {
1400 	struct qat_cryptodev_private *crypto_qat;
1401 	struct qat_asym_session *qat_session;
1402 	int ret = 0;
1403 
1404 	crypto_qat = dev->data->dev_private;
1405 	qat_session = (struct qat_asym_session *) session->sess_private_data;
1406 	memset(qat_session, 0, sizeof(*qat_session));
1407 
1408 	qat_session->xform.xform_type = xform->xform_type;
1409 	switch (xform->xform_type) {
1410 	case RTE_CRYPTO_ASYM_XFORM_MODEX:
1411 		ret = session_set_modexp(qat_session, xform);
1412 		break;
1413 	case RTE_CRYPTO_ASYM_XFORM_MODINV:
1414 		ret = session_set_modinv(qat_session, xform);
1415 		break;
1416 	case RTE_CRYPTO_ASYM_XFORM_RSA: {
1417 		if (unlikely((xform->rsa.n.length < RSA_MODULUS_2048_BITS)
1418 				&& (crypto_qat->qat_dev->options.legacy_alg == 0))) {
1419 			ret = -ENOTSUP;
1420 			return ret;
1421 		}
1422 		ret = session_set_rsa(qat_session, xform);
1423 		}
1424 		break;
1425 	case RTE_CRYPTO_ASYM_XFORM_ECDSA:
1426 	case RTE_CRYPTO_ASYM_XFORM_ECPM:
1427 	case RTE_CRYPTO_ASYM_XFORM_ECDH:
1428 		ret = session_set_ec(qat_session, xform);
1429 		break;
1430 	case RTE_CRYPTO_ASYM_XFORM_SM2:
1431 		break;
1432 	default:
1433 		ret = -ENOTSUP;
1434 	}
1435 
1436 	if (ret) {
1437 		QAT_LOG(ERR, "Unsupported xform type");
1438 		return ret;
1439 	}
1440 
1441 	return 0;
1442 }
1443 
1444 unsigned int
1445 qat_asym_session_get_private_size(struct rte_cryptodev *dev __rte_unused)
1446 {
1447 	return RTE_ALIGN_CEIL(sizeof(struct qat_asym_session), 8);
1448 }
1449 
/* Release the session-owned mod-exp key buffers (PARAM_CLR wipes and
 * frees each crypto parameter — see its definition in the QAT headers).
 */
static void
session_clear_modexp(struct rte_crypto_modex_xform *modex)
{
	PARAM_CLR(modex->modulus);
	PARAM_CLR(modex->exponent);
}
1456 
/* Release the session-owned modular-inverse modulus buffer. */
static void
session_clear_modinv(struct rte_crypto_modinv_xform *modinv)
{
	PARAM_CLR(modinv->modulus);
}
1462 
/* Release the session-owned RSA key buffers: public n and e always,
 * then either the plain private exponent d or the CRT quintuple,
 * depending on the key type stored at session setup.
 */
static void
session_clear_rsa(struct rte_crypto_rsa_xform *rsa)
{
	PARAM_CLR(rsa->n);
	PARAM_CLR(rsa->e);
	if (rsa->key_type == RTE_RSA_KEY_TYPE_EXP) {
		PARAM_CLR(rsa->d);
	} else {
		PARAM_CLR(rsa->qt.p);
		PARAM_CLR(rsa->qt.q);
		PARAM_CLR(rsa->qt.dP);
		PARAM_CLR(rsa->qt.dQ);
		PARAM_CLR(rsa->qt.qInv);
	}
}
1478 
1479 static void
1480 session_clear_xform(struct qat_asym_session *qat_session)
1481 {
1482 	switch (qat_session->xform.xform_type) {
1483 	case RTE_CRYPTO_ASYM_XFORM_MODEX:
1484 		session_clear_modexp(&qat_session->xform.modex);
1485 		break;
1486 	case RTE_CRYPTO_ASYM_XFORM_MODINV:
1487 		session_clear_modinv(&qat_session->xform.modinv);
1488 		break;
1489 	case RTE_CRYPTO_ASYM_XFORM_RSA:
1490 		session_clear_rsa(&qat_session->xform.rsa);
1491 		break;
1492 	default:
1493 		break;
1494 	}
1495 }
1496 
1497 void
1498 qat_asym_session_clear(struct rte_cryptodev *dev,
1499 		struct rte_cryptodev_asym_session *session)
1500 {
1501 	void *sess_priv = session->sess_private_data;
1502 	struct qat_asym_session *qat_session =
1503 		(struct qat_asym_session *)sess_priv;
1504 
1505 	if (sess_priv) {
1506 		session_clear_xform(qat_session);
1507 		memset(qat_session, 0, qat_asym_session_get_private_size(dev));
1508 	}
1509 }
1510 
/* Burst enqueue entry point: hand ops to the common QAT ring code,
 * building each descriptor with qat_asym_build_request().
 */
static uint16_t
qat_asym_crypto_enqueue_op_burst(void *qp, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	return qat_enqueue_op_burst(qp, qat_asym_build_request, (void **)ops,
			nb_ops);
}
1518 
/* Burst dequeue entry point: drain responses from the common QAT ring
 * code, post-processing each with qat_asym_process_response().
 */
static uint16_t
qat_asym_crypto_dequeue_op_burst(void *qp, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	return qat_dequeue_op_burst(qp, (void **)ops, qat_asym_process_response,
				nb_ops);
}
1526 
1527 void
1528 qat_asym_init_op_cookie(void *op_cookie)
1529 {
1530 	int j;
1531 	struct qat_asym_op_cookie *cookie = op_cookie;
1532 
1533 	cookie->input_addr = rte_mempool_virt2iova(cookie) +
1534 			offsetof(struct qat_asym_op_cookie,
1535 					input_params_ptrs);
1536 
1537 	cookie->output_addr = rte_mempool_virt2iova(cookie) +
1538 			offsetof(struct qat_asym_op_cookie,
1539 					output_params_ptrs);
1540 
1541 	for (j = 0; j < 8; j++) {
1542 		cookie->input_params_ptrs[j] =
1543 				rte_mempool_virt2iova(cookie) +
1544 				offsetof(struct qat_asym_op_cookie,
1545 						input_array[j]);
1546 		cookie->output_params_ptrs[j] =
1547 				rte_mempool_virt2iova(cookie) +
1548 				offsetof(struct qat_asym_op_cookie,
1549 						output_array[j]);
1550 	}
1551 }
1552 
/*
 * Create the asymmetric-crypto cryptodev instance on top of one QAT
 * PCI device: validate the device generation supports asym, register
 * a subset rte_device, create the cryptodev, and (primary process
 * only) parse devargs and load capabilities.
 *
 * Returns 0 on success, negative errno / -1 on failure.
 */
static int
qat_asym_dev_create(struct qat_pci_device *qat_pci_dev)
{
	struct qat_cryptodev_private *internals;
	struct rte_cryptodev *cryptodev;
	struct qat_device_info *qat_dev_instance =
		&qat_pci_devs[qat_pci_dev->qat_dev_id];
	struct rte_cryptodev_pmd_init_params init_params = {
		.name = "",
		.socket_id = qat_dev_instance->pci_dev->device.numa_node,
		.private_data_size = sizeof(struct qat_cryptodev_private)
	};
	const struct qat_crypto_gen_dev_ops *gen_dev_ops =
		&qat_asym_gen_dev_ops[qat_pci_dev->qat_dev_gen];
	char name[RTE_CRYPTODEV_NAME_MAX_LEN];
	char capa_memz_name[RTE_CRYPTODEV_NAME_MAX_LEN];
	uint16_t sub_id = qat_dev_instance->pci_dev->id.subsystem_device_id;
	char *cmdline = NULL;

	snprintf(name, RTE_CRYPTODEV_NAME_MAX_LEN, "%s_%s",
			qat_pci_dev->name, "asym");
	QAT_LOG(DEBUG, "Creating QAT ASYM device %s", name);

	/* A vQAT instance serves asym only with the matching subsystem id. */
	if (qat_pci_dev->qat_dev_gen == QAT_VQAT &&
		sub_id != ADF_VQAT_ASYM_PCI_SUBSYSTEM_ID) {
		QAT_LOG(ERR, "Device (vqat instance) %s does not support asymmetric crypto",
				name);
		return -EFAULT;
	}
	/* Generations without asym ops tables cannot host this service. */
	if (gen_dev_ops->cryptodev_ops == NULL) {
		QAT_LOG(ERR, "Device %s does not support asymmetric crypto",
				name);
		return -(EFAULT);
	}

	/* Primary publishes the driver id; secondaries must match it. */
	if (rte_eal_process_type() == RTE_PROC_PRIMARY) {
		qat_pci_dev->qat_asym_driver_id =
				qat_asym_driver_id;
	} else if (rte_eal_process_type() == RTE_PROC_SECONDARY) {
		if (qat_pci_dev->qat_asym_driver_id !=
				qat_asym_driver_id) {
			QAT_LOG(ERR,
				"Device %s have different driver id than corresponding device in primary process",
				name);
			return -(EFAULT);
		}
	}

	/* Populate subset device to use in cryptodev device creation */
	qat_dev_instance->asym_rte_dev.driver = &cryptodev_qat_asym_driver;
	qat_dev_instance->asym_rte_dev.numa_node =
			qat_dev_instance->pci_dev->device.numa_node;
	qat_dev_instance->asym_rte_dev.devargs = NULL;

	cryptodev = rte_cryptodev_pmd_create(name,
			&(qat_dev_instance->asym_rte_dev), &init_params);

	if (cryptodev == NULL)
		return -ENODEV;

	qat_dev_instance->asym_rte_dev.name = cryptodev->data->name;
	cryptodev->driver_id = qat_asym_driver_id;
	cryptodev->dev_ops = gen_dev_ops->cryptodev_ops;

	cryptodev->enqueue_burst = qat_asym_crypto_enqueue_op_burst;
	cryptodev->dequeue_burst = qat_asym_crypto_dequeue_op_burst;

	cryptodev->feature_flags = gen_dev_ops->get_feature_flags(qat_pci_dev);

	/* Secondary processes share the primary's capability data. */
	if (rte_eal_process_type() != RTE_PROC_PRIMARY)
		return 0;

	snprintf(capa_memz_name, RTE_CRYPTODEV_NAME_MAX_LEN,
			"QAT_ASYM_CAPA_GEN_%d",
			qat_pci_dev->qat_dev_gen);

	internals = cryptodev->data->dev_private;
	internals->qat_dev = qat_pci_dev;
	internals->dev_id = cryptodev->data->dev_id;

	/* Optional devarg: clamp enqueue threshold to the supported max. */
	cmdline = qat_dev_cmdline_get_val(qat_pci_dev,
			ASYM_ENQ_THRESHOLD_NAME);
	if (cmdline) {
		internals->min_enq_burst_threshold =
			atoi(cmdline) > MAX_QP_THRESHOLD_SIZE ?
			MAX_QP_THRESHOLD_SIZE :
			atoi(cmdline);
	}

	/* NOTE(review): a set PKE bit in slice_map is treated as "no PKE
	 * available" on this device — confirm the bit polarity matches
	 * the firmware's slice-map convention.
	 */
	if (qat_pci_dev->options.slice_map & ICP_ACCEL_MASK_PKE_SLICE) {
		QAT_LOG(ERR, "Device %s does not support PKE slice",
				name);
		rte_cryptodev_pmd_destroy(cryptodev);
		memset(&qat_dev_instance->asym_rte_dev, 0,
			sizeof(qat_dev_instance->asym_rte_dev));
		return -1;
	}

	if (gen_dev_ops->get_capabilities(internals,
			capa_memz_name, qat_pci_dev->options.slice_map) < 0) {
		QAT_LOG(ERR,
			"Device cannot obtain capabilities, destroying PMD for %s",
			name);
		rte_cryptodev_pmd_destroy(cryptodev);
		memset(&qat_dev_instance->asym_rte_dev, 0,
			sizeof(qat_dev_instance->asym_rte_dev));
		return -1;
	}

	qat_pci_dev->pmd[QAT_SERVICE_ASYMMETRIC] = internals;
	internals->service_type = QAT_SERVICE_ASYMMETRIC;
	QAT_LOG(DEBUG, "Created QAT ASYM device %s as cryptodev instance %d",
			cryptodev->data->name, internals->dev_id);
	return 0;
}
1668 
1669 static int
1670 qat_asym_dev_destroy(struct qat_pci_device *qat_pci_dev)
1671 {
1672 	struct rte_cryptodev *cryptodev;
1673 	struct qat_cryptodev_private *dev;
1674 
1675 	if (qat_pci_dev == NULL)
1676 		return -ENODEV;
1677 	dev = qat_pci_dev->pmd[QAT_SERVICE_ASYMMETRIC];
1678 	if (dev == NULL)
1679 		return 0;
1680 	if (rte_eal_process_type() == RTE_PROC_PRIMARY)
1681 		rte_memzone_free(dev->capa_mz);
1682 	/* free crypto device */
1683 	cryptodev = rte_cryptodev_pmd_get_dev(dev->dev_id);
1684 	rte_cryptodev_pmd_destroy(cryptodev);
1685 	qat_pci_devs[qat_pci_dev->qat_dev_id].asym_rte_dev.name = NULL;
1686 	qat_pci_dev->pmd[QAT_SERVICE_ASYMMETRIC] = NULL;
1687 
1688 	return 0;
1689 }
1690 
/* Register this PMD with the cryptodev layer under the asym driver id. */
static struct cryptodev_driver qat_crypto_drv;
RTE_PMD_REGISTER_CRYPTO_DRIVER(qat_crypto_drv,
		cryptodev_qat_asym_driver,
		qat_asym_driver_id);

/* Constructor: hook the asymmetric service (devargs, name and the
 * create/destroy callbacks) into the common QAT service table so the
 * PCI probe path can instantiate it.
 */
RTE_INIT(qat_asym_init)
{
	qat_cmdline_defines[QAT_SERVICE_ASYMMETRIC] = arguments;
	qat_service[QAT_SERVICE_ASYMMETRIC].name = "asymmetric crypto";
	qat_service[QAT_SERVICE_ASYMMETRIC].dev_create = qat_asym_dev_create;
	qat_service[QAT_SERVICE_ASYMMETRIC].dev_destroy = qat_asym_dev_destroy;
}
1703