xref: /dpdk/drivers/crypto/uadk/uadk_crypto_pmd.c (revision 4b53e9802b6b6040ad5622b1414aaa93d9581d0c)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright 2022-2023 Huawei Technologies Co.,Ltd. All rights reserved.
3  * Copyright 2022-2023 Linaro ltd.
4  */
5 
6 #include <stdlib.h>
7 
8 #include <bus_vdev_driver.h>
9 #include <cryptodev_pmd.h>
10 #include <rte_bus_vdev.h>
11 
12 #include <uadk/wd_cipher.h>
13 #include <uadk/wd_digest.h>
14 #include <uadk/wd_sched.h>
15 
16 #include "uadk_crypto_pmd_private.h"
17 
18 static uint8_t uadk_cryptodev_driver_id;
19 
/*
 * Capabilities advertised for UADK v2 ("hisi_qm_v2") devices.
 * Plain-hash entries advertise a zero key size; HMAC entries accept
 * keys up to one hash block.  The cipher out_bytes granularities that
 * correspond to these algorithms are set per-session in
 * uadk_set_session_cipher_parameters().
 */
static const struct rte_cryptodev_capabilities uadk_crypto_v2_capabilities[] = {
	{	/* MD5 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 16,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* MD5 */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_MD5,
				.block_size = 64,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.digest_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA1 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 20,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA1 */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1,
				.block_size = 64,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.digest_size = {
					.min = 20,
					.max = 20,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA224 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 28,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA224 */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA224,
				.block_size = 64,
					.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				/* NOTE(review): unlike the other plain hashes, this
				 * entry advertises a variable digest size (1..28)
				 * rather than a fixed 28 — confirm intended. */
				.digest_size = {
					.min = 1,
					.max = 28,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA256 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 32,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA256 */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256,
				.block_size = 64,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.digest_size = {
					.min = 32,
					.max = 32,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA384 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
				.block_size = 128,
				.key_size = {
					.min = 1,
					.max = 128,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 48,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA384 */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA384,
				.block_size = 128,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.digest_size = {
					.min = 48,
					.max = 48,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA512 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
				.block_size = 128,
				.key_size = {
					.min = 1,
					.max = 128,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA512 */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA512,
				.block_size = 128,
				.key_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				},
				.digest_size = {
					.min = 64,
					.max = 64,
					.increment = 0
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* AES ECB */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_ECB,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.iv_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* AES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 32,
					.increment = 8
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* AES XTS */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_XTS,
				.block_size = 1,
				/* XTS keys are two AES keys concatenated: 2x16 or 2x32 */
				.key_size = {
					.min = 32,
					.max = 64,
					.increment = 32
				},
				.iv_size = {
					.min = 0,
					.max = 0,
					.increment = 0
				}
			}, }
		}, }
	},
	{	/* DES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_DES_CBC,
				.block_size = 8,
				.key_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				},
				.iv_size = {
					.min = 8,
					.max = 8,
					.increment = 0
				}
			}, }
		}, }
	},
	/* End of capabilities */
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
356 
357 /* Configure device */
358 static int
359 uadk_crypto_pmd_config(struct rte_cryptodev *dev __rte_unused,
360 		       struct rte_cryptodev_config *config __rte_unused)
361 {
362 	return 0;
363 }
364 
365 /* Start device */
366 static int
367 uadk_crypto_pmd_start(struct rte_cryptodev *dev __rte_unused)
368 {
369 	return 0;
370 }
371 
372 /* Stop device */
373 static void
374 uadk_crypto_pmd_stop(struct rte_cryptodev *dev __rte_unused)
375 {
376 }
377 
/* Close device: tear down the UADK wd environments that were lazily
 * initialized at session-setup time, so a later re-probe starts clean. */
static int
uadk_crypto_pmd_close(struct rte_cryptodev *dev)
{
	struct uadk_crypto_priv *priv = dev->data->dev_private;

	/* Uninit the cipher env only if a cipher session brought it up */
	if (priv->env_cipher_init) {
		wd_cipher_env_uninit();
		priv->env_cipher_init = false;
	}

	/* Uninit the digest env only if an auth session brought it up */
	if (priv->env_auth_init) {
		wd_digest_env_uninit();
		priv->env_auth_init = false;
	}

	return 0;
}
396 
397 /* Get device statistics */
398 static void
399 uadk_crypto_pmd_stats_get(struct rte_cryptodev *dev,
400 			  struct rte_cryptodev_stats *stats)
401 {
402 	int qp_id;
403 
404 	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
405 		struct uadk_qp *qp = dev->data->queue_pairs[qp_id];
406 
407 		stats->enqueued_count += qp->qp_stats.enqueued_count;
408 		stats->dequeued_count += qp->qp_stats.dequeued_count;
409 		stats->enqueue_err_count += qp->qp_stats.enqueue_err_count;
410 		stats->dequeue_err_count += qp->qp_stats.dequeue_err_count;
411 	}
412 }
413 
414 /* Reset device statistics */
415 static void
416 uadk_crypto_pmd_stats_reset(struct rte_cryptodev *dev __rte_unused)
417 {
418 	int qp_id;
419 
420 	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
421 		struct uadk_qp *qp = dev->data->queue_pairs[qp_id];
422 
423 		memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
424 	}
425 }
426 
427 /* Get device info */
428 static void
429 uadk_crypto_pmd_info_get(struct rte_cryptodev *dev,
430 			 struct rte_cryptodev_info *dev_info)
431 {
432 	struct uadk_crypto_priv *priv = dev->data->dev_private;
433 
434 	if (dev_info != NULL) {
435 		dev_info->driver_id = dev->driver_id;
436 		dev_info->driver_name = dev->device->driver->name;
437 		dev_info->max_nb_queue_pairs = 128;
438 		/* No limit of number of sessions */
439 		dev_info->sym.max_nb_sessions = 0;
440 		dev_info->feature_flags = dev->feature_flags;
441 
442 		if (priv->version == UADK_CRYPTO_V2)
443 			dev_info->capabilities = uadk_crypto_v2_capabilities;
444 	}
445 }
446 
447 /* Release queue pair */
448 static int
449 uadk_crypto_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
450 {
451 	struct uadk_qp *qp = dev->data->queue_pairs[qp_id];
452 
453 	if (qp) {
454 		rte_ring_free(qp->processed_pkts);
455 		rte_free(qp);
456 		dev->data->queue_pairs[qp_id] = NULL;
457 	}
458 
459 	return 0;
460 }
461 
462 /* set a unique name for the queue pair based on its name, dev_id and qp_id */
463 static int
464 uadk_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
465 			    struct uadk_qp *qp)
466 {
467 	unsigned int n = snprintf(qp->name, sizeof(qp->name),
468 				  "uadk_crypto_pmd_%u_qp_%u",
469 				  dev->data->dev_id, qp->id);
470 
471 	if (n >= sizeof(qp->name))
472 		return -EINVAL;
473 
474 	return 0;
475 }
476 
477 /* Create a ring to place process packets on */
478 static struct rte_ring *
479 uadk_pmd_qp_create_processed_pkts_ring(struct uadk_qp *qp,
480 				       unsigned int ring_size, int socket_id)
481 {
482 	struct rte_ring *r = qp->processed_pkts;
483 
484 	if (r) {
485 		if (rte_ring_get_size(r) >= ring_size) {
486 			UADK_LOG(INFO, "Reusing existing ring %s for processed packets",
487 				 qp->name);
488 			return r;
489 		}
490 
491 		UADK_LOG(ERR, "Unable to reuse existing ring %s for processed packets",
492 			 qp->name);
493 		return NULL;
494 	}
495 
496 	return rte_ring_create(qp->name, ring_size, socket_id,
497 			       RING_F_EXACT_SZ);
498 }
499 
500 static int
501 uadk_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
502 			 const struct rte_cryptodev_qp_conf *qp_conf,
503 			 int socket_id)
504 {
505 	struct uadk_qp *qp;
506 
507 	/* Free memory prior to re-allocation if needed. */
508 	if (dev->data->queue_pairs[qp_id] != NULL)
509 		uadk_crypto_pmd_qp_release(dev, qp_id);
510 
511 	/* Allocate the queue pair data structure. */
512 	qp = rte_zmalloc_socket("uadk PMD Queue Pair", sizeof(*qp),
513 				RTE_CACHE_LINE_SIZE, socket_id);
514 	if (qp == NULL)
515 		return (-ENOMEM);
516 
517 	qp->id = qp_id;
518 	dev->data->queue_pairs[qp_id] = qp;
519 
520 	if (uadk_pmd_qp_set_unique_name(dev, qp))
521 		goto qp_setup_cleanup;
522 
523 	qp->processed_pkts = uadk_pmd_qp_create_processed_pkts_ring(qp,
524 				qp_conf->nb_descriptors, socket_id);
525 	if (qp->processed_pkts == NULL)
526 		goto qp_setup_cleanup;
527 
528 	memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
529 
530 	return 0;
531 
532 qp_setup_cleanup:
533 	if (qp) {
534 		rte_free(qp);
535 		qp = NULL;
536 	}
537 	return -EINVAL;
538 }
539 
540 static unsigned int
541 uadk_crypto_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
542 {
543 	return sizeof(struct uadk_crypto_session);
544 }
545 
546 static enum uadk_chain_order
547 uadk_get_chain_order(const struct rte_crypto_sym_xform *xform)
548 {
549 	enum uadk_chain_order res = UADK_CHAIN_NOT_SUPPORTED;
550 
551 	if (xform != NULL) {
552 		if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
553 			if (xform->next == NULL)
554 				res = UADK_CHAIN_ONLY_AUTH;
555 			else if (xform->next->type ==
556 					RTE_CRYPTO_SYM_XFORM_CIPHER)
557 				res = UADK_CHAIN_AUTH_CIPHER;
558 		}
559 
560 		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
561 			if (xform->next == NULL)
562 				res = UADK_CHAIN_ONLY_CIPHER;
563 			else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
564 				res = UADK_CHAIN_CIPHER_AUTH;
565 		}
566 	}
567 
568 	return res;
569 }
570 
571 static int
572 uadk_set_session_cipher_parameters(struct rte_cryptodev *dev,
573 				   struct uadk_crypto_session *sess,
574 				   struct rte_crypto_sym_xform *xform)
575 {
576 	struct uadk_crypto_priv *priv = dev->data->dev_private;
577 	struct rte_crypto_cipher_xform *cipher = &xform->cipher;
578 	struct wd_cipher_sess_setup setup = {0};
579 	struct sched_params params = {0};
580 	int ret;
581 
582 	if (!priv->env_cipher_init) {
583 		ret = wd_cipher_env_init(NULL);
584 		if (ret < 0)
585 			return -EINVAL;
586 		priv->env_cipher_init = true;
587 	}
588 
589 	sess->cipher.direction = cipher->op;
590 	sess->iv.offset = cipher->iv.offset;
591 	sess->iv.length = cipher->iv.length;
592 
593 	switch (cipher->algo) {
594 	/* Cover supported cipher algorithms */
595 	case RTE_CRYPTO_CIPHER_AES_CTR:
596 		setup.alg = WD_CIPHER_AES;
597 		setup.mode = WD_CIPHER_CTR;
598 		sess->cipher.req.out_bytes = 64;
599 		break;
600 	case RTE_CRYPTO_CIPHER_AES_ECB:
601 		setup.alg = WD_CIPHER_AES;
602 		setup.mode = WD_CIPHER_ECB;
603 		sess->cipher.req.out_bytes = 16;
604 		break;
605 	case RTE_CRYPTO_CIPHER_AES_CBC:
606 		setup.alg = WD_CIPHER_AES;
607 		setup.mode = WD_CIPHER_CBC;
608 		if (cipher->key.length == 16)
609 			sess->cipher.req.out_bytes = 16;
610 		else
611 			sess->cipher.req.out_bytes = 64;
612 		break;
613 	case RTE_CRYPTO_CIPHER_AES_XTS:
614 		setup.alg = WD_CIPHER_AES;
615 		setup.mode = WD_CIPHER_XTS;
616 		if (cipher->key.length == 16)
617 			sess->cipher.req.out_bytes = 32;
618 		else
619 			sess->cipher.req.out_bytes = 512;
620 		break;
621 	default:
622 		ret = -ENOTSUP;
623 		goto env_uninit;
624 	}
625 
626 	params.numa_id = -1;	/* choose nearby numa node */
627 	setup.sched_param = &params;
628 	sess->handle_cipher = wd_cipher_alloc_sess(&setup);
629 	if (!sess->handle_cipher) {
630 		UADK_LOG(ERR, "uadk failed to alloc session!\n");
631 		ret = -EINVAL;
632 		goto env_uninit;
633 	}
634 
635 	ret = wd_cipher_set_key(sess->handle_cipher, cipher->key.data, cipher->key.length);
636 	if (ret) {
637 		wd_cipher_free_sess(sess->handle_cipher);
638 		UADK_LOG(ERR, "uadk failed to set key!\n");
639 		ret = -EINVAL;
640 		goto env_uninit;
641 	}
642 
643 	return 0;
644 
645 env_uninit:
646 	wd_cipher_env_uninit();
647 	priv->env_cipher_init = false;
648 	return ret;
649 }
650 
651 /* Set session auth parameters */
652 static int
653 uadk_set_session_auth_parameters(struct rte_cryptodev *dev,
654 				 struct uadk_crypto_session *sess,
655 				 struct rte_crypto_sym_xform *xform)
656 {
657 	struct uadk_crypto_priv *priv = dev->data->dev_private;
658 	struct wd_digest_sess_setup setup = {0};
659 	struct sched_params params = {0};
660 	int ret;
661 
662 	if (!priv->env_auth_init) {
663 		ret = wd_digest_env_init(NULL);
664 		if (ret < 0)
665 			return -EINVAL;
666 		priv->env_auth_init = true;
667 	}
668 
669 	sess->auth.operation = xform->auth.op;
670 	sess->auth.digest_length = xform->auth.digest_length;
671 
672 	switch (xform->auth.algo) {
673 	case RTE_CRYPTO_AUTH_MD5:
674 	case RTE_CRYPTO_AUTH_MD5_HMAC:
675 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_MD5) ?
676 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
677 		setup.alg = WD_DIGEST_MD5;
678 		sess->auth.req.out_buf_bytes = 16;
679 		sess->auth.req.out_bytes = 16;
680 		break;
681 	case RTE_CRYPTO_AUTH_SHA1:
682 	case RTE_CRYPTO_AUTH_SHA1_HMAC:
683 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA1) ?
684 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
685 		setup.alg = WD_DIGEST_SHA1;
686 		sess->auth.req.out_buf_bytes = 20;
687 		sess->auth.req.out_bytes = 20;
688 		break;
689 	case RTE_CRYPTO_AUTH_SHA224:
690 	case RTE_CRYPTO_AUTH_SHA224_HMAC:
691 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA224) ?
692 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
693 		setup.alg = WD_DIGEST_SHA224;
694 		sess->auth.req.out_buf_bytes = 28;
695 		sess->auth.req.out_bytes = 28;
696 		break;
697 	case RTE_CRYPTO_AUTH_SHA256:
698 	case RTE_CRYPTO_AUTH_SHA256_HMAC:
699 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA256) ?
700 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
701 		setup.alg = WD_DIGEST_SHA256;
702 		sess->auth.req.out_buf_bytes = 32;
703 		sess->auth.req.out_bytes = 32;
704 		break;
705 	case RTE_CRYPTO_AUTH_SHA384:
706 	case RTE_CRYPTO_AUTH_SHA384_HMAC:
707 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA384) ?
708 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
709 		setup.alg = WD_DIGEST_SHA384;
710 		sess->auth.req.out_buf_bytes = 48;
711 		sess->auth.req.out_bytes = 48;
712 		break;
713 	case RTE_CRYPTO_AUTH_SHA512:
714 	case RTE_CRYPTO_AUTH_SHA512_HMAC:
715 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA512) ?
716 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
717 		setup.alg = WD_DIGEST_SHA512;
718 		sess->auth.req.out_buf_bytes = 64;
719 		sess->auth.req.out_bytes = 64;
720 		break;
721 	default:
722 		ret = -ENOTSUP;
723 		goto env_uninit;
724 	}
725 
726 	params.numa_id = -1;	/* choose nearby numa node */
727 	setup.sched_param = &params;
728 	sess->handle_digest = wd_digest_alloc_sess(&setup);
729 	if (!sess->handle_digest) {
730 		UADK_LOG(ERR, "uadk failed to alloc session!\n");
731 		ret = -EINVAL;
732 		goto env_uninit;
733 	}
734 
735 	/* if mode is HMAC, should set key */
736 	if (setup.mode == WD_DIGEST_HMAC) {
737 		ret = wd_digest_set_key(sess->handle_digest,
738 					xform->auth.key.data,
739 					xform->auth.key.length);
740 		if (ret) {
741 			UADK_LOG(ERR, "uadk failed to alloc session!\n");
742 			wd_digest_free_sess(sess->handle_digest);
743 			sess->handle_digest = 0;
744 			ret = -EINVAL;
745 			goto env_uninit;
746 		}
747 	}
748 
749 	return 0;
750 
751 env_uninit:
752 	wd_digest_env_uninit();
753 	priv->env_auth_init = false;
754 	return ret;
755 }
756 
757 static int
758 uadk_crypto_sym_session_configure(struct rte_cryptodev *dev,
759 				  struct rte_crypto_sym_xform *xform,
760 				  struct rte_cryptodev_sym_session *session)
761 {
762 	struct rte_crypto_sym_xform *cipher_xform = NULL;
763 	struct rte_crypto_sym_xform *auth_xform = NULL;
764 	struct uadk_crypto_session *sess = CRYPTODEV_GET_SYM_SESS_PRIV(session);
765 	int ret;
766 
767 	if (unlikely(!sess)) {
768 		UADK_LOG(ERR, "Session not available");
769 		return -EINVAL;
770 	}
771 
772 	sess->chain_order = uadk_get_chain_order(xform);
773 	switch (sess->chain_order) {
774 	case UADK_CHAIN_ONLY_CIPHER:
775 		cipher_xform = xform;
776 		break;
777 	case UADK_CHAIN_ONLY_AUTH:
778 		auth_xform = xform;
779 		break;
780 	case UADK_CHAIN_CIPHER_AUTH:
781 		cipher_xform = xform;
782 		auth_xform = xform->next;
783 		break;
784 	case UADK_CHAIN_AUTH_CIPHER:
785 		auth_xform = xform;
786 		cipher_xform = xform->next;
787 		break;
788 	default:
789 		return -ENOTSUP;
790 	}
791 
792 	if (cipher_xform) {
793 		ret = uadk_set_session_cipher_parameters(dev, sess, cipher_xform);
794 		if (ret != 0) {
795 			UADK_LOG(ERR,
796 				"Invalid/unsupported cipher parameters");
797 			return ret;
798 		}
799 	}
800 
801 	if (auth_xform) {
802 		ret = uadk_set_session_auth_parameters(dev, sess, auth_xform);
803 		if (ret != 0) {
804 			UADK_LOG(ERR,
805 				"Invalid/unsupported auth parameters");
806 			return ret;
807 		}
808 	}
809 
810 	return 0;
811 }
812 
813 static void
814 uadk_crypto_sym_session_clear(struct rte_cryptodev *dev __rte_unused,
815 			      struct rte_cryptodev_sym_session *session)
816 {
817 	struct uadk_crypto_session *sess = CRYPTODEV_GET_SYM_SESS_PRIV(session);
818 
819 	if (unlikely(sess == NULL)) {
820 		UADK_LOG(ERR, "Session not available");
821 		return;
822 	}
823 
824 	if (sess->handle_cipher) {
825 		wd_cipher_free_sess(sess->handle_cipher);
826 		sess->handle_cipher = 0;
827 	}
828 
829 	if (sess->handle_digest) {
830 		wd_digest_free_sess(sess->handle_digest);
831 		sess->handle_digest = 0;
832 	}
833 }
834 
/* Cryptodev op table wired into the device at probe time */
static struct rte_cryptodev_ops uadk_crypto_pmd_ops = {
		.dev_configure		= uadk_crypto_pmd_config,
		.dev_start		= uadk_crypto_pmd_start,
		.dev_stop		= uadk_crypto_pmd_stop,
		.dev_close		= uadk_crypto_pmd_close,
		.stats_get		= uadk_crypto_pmd_stats_get,
		.stats_reset		= uadk_crypto_pmd_stats_reset,
		.dev_infos_get		= uadk_crypto_pmd_info_get,
		.queue_pair_setup	= uadk_crypto_pmd_qp_setup,
		.queue_pair_release	= uadk_crypto_pmd_qp_release,
		.sym_session_get_size	= uadk_crypto_sym_session_get_size,
		.sym_session_configure	= uadk_crypto_sym_session_configure,
		.sym_session_clear	= uadk_crypto_sym_session_clear,
};
849 
/* Run one cipher operation synchronously through the wd session.
 * Fills the session's wd request from the op (in-place src/dst pointers
 * into the mbufs, IV from the op's private area) and busy-retries while
 * the accelerator queue is full.  Sets op->status to ERROR on failure,
 * otherwise leaves it untouched for the caller to finalize. */
static void
uadk_process_cipher_op(struct rte_crypto_op *op,
		       struct uadk_crypto_session *sess,
		       struct rte_mbuf *msrc, struct rte_mbuf *mdst)
{
	uint32_t off = op->sym->cipher.data.offset;
	int ret;

	if (!sess) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	/* src and dst use the same offset; assumes data fits in the first
	 * mbuf segment — TODO confirm chained mbufs are rejected upstream */
	sess->cipher.req.src = rte_pktmbuf_mtod_offset(msrc, uint8_t *, off);
	sess->cipher.req.in_bytes = op->sym->cipher.data.length;
	sess->cipher.req.dst = rte_pktmbuf_mtod_offset(mdst, uint8_t *, off);
	sess->cipher.req.out_buf_bytes = sess->cipher.req.in_bytes;
	sess->cipher.req.iv_bytes = sess->iv.length;
	sess->cipher.req.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
							sess->iv.offset);
	if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		sess->cipher.req.op_type = WD_CIPHER_ENCRYPTION;
	else
		sess->cipher.req.op_type = WD_CIPHER_DECRYPTION;

	/* Retry while the hardware queue is busy */
	do {
		ret = wd_do_cipher_sync(sess->handle_cipher, &sess->cipher.req);
	} while (ret == -WD_EBUSY);

	if (ret)
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
}
882 
/* Run one digest operation synchronously through the wd session.
 * The digest is always produced into the qp's temp buffer first: for
 * VERIFY ops it is compared against the op's expected digest; otherwise
 * it is copied to the op's digest pointer (or appended after the auth
 * region in the destination mbuf when that pointer is NULL). */
static void
uadk_process_auth_op(struct uadk_qp *qp, struct rte_crypto_op *op,
		     struct uadk_crypto_session *sess,
		     struct rte_mbuf *msrc, struct rte_mbuf *mdst)
{
	uint32_t srclen = op->sym->auth.data.length;
	uint32_t off = op->sym->auth.data.offset;
	uint8_t *dst = qp->temp_digest;
	int ret;

	if (!sess) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	sess->auth.req.in = rte_pktmbuf_mtod_offset(msrc, uint8_t *, off);
	sess->auth.req.in_bytes = srclen;
	sess->auth.req.out = dst;

	/* Retry while the hardware queue is busy */
	do {
		ret = wd_do_digest_sync(sess->handle_digest, &sess->auth.req);
	} while (ret == -WD_EBUSY);

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(dst, op->sym->auth.digest.data,
				sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
	} else {
		uint8_t *auth_dst;

		auth_dst = op->sym->auth.digest.data;
		if (auth_dst == NULL)
			auth_dst = rte_pktmbuf_mtod_offset(mdst, uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		memcpy(auth_dst, dst, sess->auth.digest_length);
	}

	/* A hardware failure overrides any AUTH_FAILED result above */
	if (ret)
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
}
925 
926 static uint16_t
927 uadk_crypto_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
928 			  uint16_t nb_ops)
929 {
930 	struct uadk_qp *qp = queue_pair;
931 	struct uadk_crypto_session *sess = NULL;
932 	struct rte_mbuf *msrc, *mdst;
933 	struct rte_crypto_op *op;
934 	uint16_t enqd = 0;
935 	int i, ret;
936 
937 	for (i = 0; i < nb_ops; i++) {
938 		op = ops[i];
939 		op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
940 		msrc = op->sym->m_src;
941 		mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
942 
943 		if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
944 			if (likely(op->sym->session != NULL))
945 				sess = CRYPTODEV_GET_SYM_SESS_PRIV(
946 					op->sym->session);
947 		}
948 
949 		switch (sess->chain_order) {
950 		case UADK_CHAIN_ONLY_CIPHER:
951 			uadk_process_cipher_op(op, sess, msrc, mdst);
952 			break;
953 		case UADK_CHAIN_ONLY_AUTH:
954 			uadk_process_auth_op(qp, op, sess, msrc, mdst);
955 			break;
956 		case UADK_CHAIN_CIPHER_AUTH:
957 			uadk_process_cipher_op(op, sess, msrc, mdst);
958 			uadk_process_auth_op(qp, op, sess, mdst, mdst);
959 			break;
960 		case UADK_CHAIN_AUTH_CIPHER:
961 			uadk_process_auth_op(qp, op, sess, msrc, mdst);
962 			uadk_process_cipher_op(op, sess, msrc, mdst);
963 			break;
964 		default:
965 			op->status = RTE_CRYPTO_OP_STATUS_ERROR;
966 			break;
967 		}
968 
969 		if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
970 			op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
971 
972 		if (op->status != RTE_CRYPTO_OP_STATUS_ERROR) {
973 			ret = rte_ring_enqueue(qp->processed_pkts, (void *)op);
974 			if (ret < 0)
975 				goto enqueue_err;
976 			qp->qp_stats.enqueued_count++;
977 			enqd++;
978 		} else {
979 			/* increment count if failed to enqueue op */
980 			qp->qp_stats.enqueue_err_count++;
981 		}
982 	}
983 
984 	return enqd;
985 
986 enqueue_err:
987 	qp->qp_stats.enqueue_err_count++;
988 	return enqd;
989 }
990 
991 static uint16_t
992 uadk_crypto_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
993 			  uint16_t nb_ops)
994 {
995 	struct uadk_qp *qp = queue_pair;
996 	unsigned int nb_dequeued;
997 
998 	nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts,
999 			(void **)ops, nb_ops, NULL);
1000 	qp->qp_stats.dequeued_count += nb_dequeued;
1001 
1002 	return nb_dequeued;
1003 }
1004 
/* Probe the virtual device: verify a UADK-capable accelerator exists,
 * create the cryptodev, and wire in the op table and burst functions.
 * Returns 0 on success, -ENODEV when no accelerator or device creation
 * fails, -EINVAL when the vdev has no name. */
static int
uadk_cryptodev_probe(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev_pmd_init_params init_params = {
		.name = "",
		.private_data_size = sizeof(struct uadk_crypto_priv),
		.max_nb_queue_pairs =
				RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS,
	};
	enum uadk_crypto_version version = UADK_CRYPTO_V2;
	struct uadk_crypto_priv *priv;
	struct rte_cryptodev *dev;
	struct uacce_dev *udev;
	const char *name;

	/* Bail out early when no cipher-capable accelerator is present */
	udev = wd_get_accel_dev("cipher");
	if (!udev)
		return -ENODEV;

	if (!strcmp(udev->api, "hisi_qm_v2"))
		version = UADK_CRYPTO_V2;

	/* udev was only needed for the API-version check */
	free(udev);

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	dev = rte_cryptodev_pmd_create(name, &vdev->device, &init_params);
	if (dev == NULL) {
		UADK_LOG(ERR, "driver %s: create failed", init_params.name);
		return -ENODEV;
	}

	dev->dev_ops = &uadk_crypto_pmd_ops;
	dev->driver_id = uadk_cryptodev_driver_id;
	dev->dequeue_burst = uadk_crypto_dequeue_burst;
	dev->enqueue_burst = uadk_crypto_enqueue_burst;
	dev->feature_flags = RTE_CRYPTODEV_FF_HW_ACCELERATED |
			     RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO;
	priv = dev->data->dev_private;
	priv->version = version;

	rte_cryptodev_pmd_probing_finish(dev);

	return 0;
}
1052 
1053 static int
1054 uadk_cryptodev_remove(struct rte_vdev_device *vdev)
1055 {
1056 	struct rte_cryptodev *cryptodev;
1057 	const char *name;
1058 
1059 	name = rte_vdev_device_name(vdev);
1060 	if (name == NULL)
1061 		return -EINVAL;
1062 
1063 	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
1064 	if (cryptodev == NULL)
1065 		return -ENODEV;
1066 
1067 	return rte_cryptodev_pmd_destroy(cryptodev);
1068 }
1069 
/* Virtual-device driver entry points */
static struct rte_vdev_driver uadk_crypto_pmd = {
	.probe       = uadk_cryptodev_probe,
	.remove      = uadk_cryptodev_remove,
};

static struct cryptodev_driver uadk_crypto_drv;

/* Register as "crypto_uadk" and allocate the crypto driver id */
#define UADK_CRYPTO_DRIVER_NAME crypto_uadk
RTE_PMD_REGISTER_VDEV(UADK_CRYPTO_DRIVER_NAME, uadk_crypto_pmd);
RTE_PMD_REGISTER_CRYPTO_DRIVER(uadk_crypto_drv, uadk_crypto_pmd.driver,
			       uadk_cryptodev_driver_id);
RTE_LOG_REGISTER_DEFAULT(uadk_crypto_logtype, INFO);
1082