xref: /dpdk/drivers/crypto/uadk/uadk_crypto_pmd.c (revision f665790a5dbad7b645ff46f31d65e977324e7bfc)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright 2022-2023 Huawei Technologies Co.,Ltd. All rights reserved.
3  * Copyright 2022-2023 Linaro ltd.
4  */
5 
6 #include <stdlib.h>
7 
8 #include <bus_vdev_driver.h>
9 #include <cryptodev_pmd.h>
10 #include <rte_bus_vdev.h>
11 
12 #include <uadk/wd_cipher.h>
13 #include <uadk/wd_digest.h>
14 #include <uadk/wd_sched.h>
15 
16 #include "uadk_crypto_pmd_private.h"
17 
18 static uint8_t uadk_cryptodev_driver_id;
19 
20 static const struct rte_cryptodev_capabilities uadk_crypto_v2_capabilities[] = {
21 	{	/* MD5 HMAC */
22 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
23 		{.sym = {
24 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
25 			{.auth = {
26 				.algo = RTE_CRYPTO_AUTH_MD5_HMAC,
27 				.block_size = 64,
28 				.key_size = {
29 					.min = 1,
30 					.max = 64,
31 					.increment = 1
32 				},
33 				.digest_size = {
34 					.min = 1,
35 					.max = 16,
36 					.increment = 1
37 				},
38 				.iv_size = { 0 }
39 			}, }
40 		}, }
41 	},
42 	{	/* MD5 */
43 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
44 		{.sym = {
45 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
46 			{.auth = {
47 				.algo = RTE_CRYPTO_AUTH_MD5,
48 				.block_size = 64,
49 				.key_size = {
50 					.min = 0,
51 					.max = 0,
52 					.increment = 0
53 				},
54 				.digest_size = {
55 					.min = 16,
56 					.max = 16,
57 					.increment = 0
58 				},
59 				.iv_size = { 0 }
60 			}, }
61 		}, }
62 	},
63 	{	/* SHA1 HMAC */
64 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
65 		{.sym = {
66 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
67 			{.auth = {
68 				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
69 				.block_size = 64,
70 				.key_size = {
71 					.min = 1,
72 					.max = 64,
73 					.increment = 1
74 				},
75 				.digest_size = {
76 					.min = 1,
77 					.max = 20,
78 					.increment = 1
79 				},
80 				.iv_size = { 0 }
81 			}, }
82 		}, }
83 	},
84 	{	/* SHA1 */
85 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
86 		{.sym = {
87 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
88 			{.auth = {
89 				.algo = RTE_CRYPTO_AUTH_SHA1,
90 				.block_size = 64,
91 				.key_size = {
92 					.min = 0,
93 					.max = 0,
94 					.increment = 0
95 				},
96 				.digest_size = {
97 					.min = 20,
98 					.max = 20,
99 					.increment = 0
100 				},
101 				.iv_size = { 0 }
102 			}, }
103 		}, }
104 	},
105 	{	/* SHA224 HMAC */
106 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
107 		{.sym = {
108 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
109 			{.auth = {
110 				.algo = RTE_CRYPTO_AUTH_SHA224_HMAC,
111 				.block_size = 64,
112 				.key_size = {
113 					.min = 1,
114 					.max = 64,
115 					.increment = 1
116 				},
117 				.digest_size = {
118 					.min = 1,
119 					.max = 28,
120 					.increment = 1
121 				},
122 				.iv_size = { 0 }
123 			}, }
124 		}, }
125 	},
126 	{	/* SHA224 */
127 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
128 		{.sym = {
129 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
130 			{.auth = {
131 				.algo = RTE_CRYPTO_AUTH_SHA224,
132 				.block_size = 64,
133 					.key_size = {
134 					.min = 0,
135 					.max = 0,
136 					.increment = 0
137 				},
138 				.digest_size = {
139 					.min = 1,
140 					.max = 28,
141 					.increment = 1
142 				},
143 				.iv_size = { 0 }
144 			}, }
145 		}, }
146 	},
147 	{	/* SHA256 HMAC */
148 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
149 		{.sym = {
150 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
151 			{.auth = {
152 				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
153 				.block_size = 64,
154 				.key_size = {
155 					.min = 1,
156 					.max = 64,
157 					.increment = 1
158 				},
159 				.digest_size = {
160 					.min = 1,
161 					.max = 32,
162 					.increment = 1
163 				},
164 				.iv_size = { 0 }
165 			}, }
166 		}, }
167 	},
168 	{	/* SHA256 */
169 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
170 		{.sym = {
171 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
172 			{.auth = {
173 				.algo = RTE_CRYPTO_AUTH_SHA256,
174 				.block_size = 64,
175 				.key_size = {
176 					.min = 0,
177 					.max = 0,
178 					.increment = 0
179 				},
180 				.digest_size = {
181 					.min = 32,
182 					.max = 32,
183 					.increment = 0
184 				},
185 				.iv_size = { 0 }
186 			}, }
187 		}, }
188 	},
189 	{	/* SHA384 HMAC */
190 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
191 		{.sym = {
192 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
193 			{.auth = {
194 				.algo = RTE_CRYPTO_AUTH_SHA384_HMAC,
195 				.block_size = 128,
196 				.key_size = {
197 					.min = 1,
198 					.max = 128,
199 					.increment = 1
200 				},
201 				.digest_size = {
202 					.min = 1,
203 					.max = 48,
204 					.increment = 1
205 				},
206 				.iv_size = { 0 }
207 			}, }
208 		}, }
209 	},
210 	{	/* SHA384 */
211 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
212 		{.sym = {
213 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
214 			{.auth = {
215 				.algo = RTE_CRYPTO_AUTH_SHA384,
216 				.block_size = 128,
217 				.key_size = {
218 					.min = 0,
219 					.max = 0,
220 					.increment = 0
221 				},
222 				.digest_size = {
223 					.min = 48,
224 					.max = 48,
225 					.increment = 0
226 				},
227 				.iv_size = { 0 }
228 			}, }
229 		}, }
230 	},
231 	{	/* SHA512 HMAC */
232 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
233 		{.sym = {
234 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
235 			{.auth = {
236 				.algo = RTE_CRYPTO_AUTH_SHA512_HMAC,
237 				.block_size = 128,
238 				.key_size = {
239 					.min = 1,
240 					.max = 128,
241 					.increment = 1
242 				},
243 				.digest_size = {
244 					.min = 1,
245 					.max = 64,
246 					.increment = 1
247 				},
248 				.iv_size = { 0 }
249 			}, }
250 		}, }
251 	},
252 	{	/* SHA512 */
253 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
254 		{.sym = {
255 			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
256 			{.auth = {
257 				.algo = RTE_CRYPTO_AUTH_SHA512,
258 				.block_size = 128,
259 				.key_size = {
260 					.min = 0,
261 					.max = 0,
262 					.increment = 0
263 				},
264 				.digest_size = {
265 					.min = 64,
266 					.max = 64,
267 					.increment = 0
268 				},
269 				.iv_size = { 0 }
270 			}, }
271 		}, }
272 	},
273 	{	/* AES ECB */
274 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
275 		{.sym = {
276 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
277 			{.cipher = {
278 				.algo = RTE_CRYPTO_CIPHER_AES_ECB,
279 				.block_size = 16,
280 				.key_size = {
281 					.min = 16,
282 					.max = 32,
283 					.increment = 8
284 				},
285 				.iv_size = {
286 					.min = 0,
287 					.max = 0,
288 					.increment = 0
289 				}
290 			}, }
291 		}, }
292 	},
293 	{	/* AES CBC */
294 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
295 		{.sym = {
296 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
297 			{.cipher = {
298 				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
299 				.block_size = 16,
300 				.key_size = {
301 					.min = 16,
302 					.max = 32,
303 					.increment = 8
304 				},
305 				.iv_size = {
306 					.min = 16,
307 					.max = 16,
308 					.increment = 0
309 				}
310 			}, }
311 		}, }
312 	},
313 	{	/* AES XTS */
314 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
315 		{.sym = {
316 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
317 			{.cipher = {
318 				.algo = RTE_CRYPTO_CIPHER_AES_XTS,
319 				.block_size = 1,
320 				.key_size = {
321 					.min = 32,
322 					.max = 64,
323 					.increment = 32
324 				},
325 				.iv_size = {
326 					.min = 0,
327 					.max = 0,
328 					.increment = 0
329 				}
330 			}, }
331 		}, }
332 	},
333 	{	/* DES CBC */
334 		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
335 		{.sym = {
336 			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
337 			{.cipher = {
338 				.algo = RTE_CRYPTO_CIPHER_DES_CBC,
339 				.block_size = 8,
340 				.key_size = {
341 					.min = 8,
342 					.max = 8,
343 					.increment = 0
344 				},
345 				.iv_size = {
346 					.min = 8,
347 					.max = 8,
348 					.increment = 0
349 				}
350 			}, }
351 		}, }
352 	},
353 	/* End of capabilities */
354 	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
355 };
356 
357 /* Configure device */
358 static int
359 uadk_crypto_pmd_config(struct rte_cryptodev *dev __rte_unused,
360 		       struct rte_cryptodev_config *config)
361 {
362 	char env[128];
363 
364 	/* set queue pairs num via env */
365 	sprintf(env, "sync:%d@0", config->nb_queue_pairs);
366 	setenv("WD_CIPHER_CTX_NUM", env, 1);
367 	setenv("WD_DIGEST_CTX_NUM", env, 1);
368 
369 	return 0;
370 }
371 
/* Start device: no hardware action required, always succeeds. */
static int
uadk_crypto_pmd_start(struct rte_cryptodev *dev __rte_unused)
{
	return 0;
}
378 
/* Stop device: no hardware action required. */
static void
uadk_crypto_pmd_stop(struct rte_cryptodev *dev __rte_unused)
{
}
384 
385 /* Close device */
386 static int
387 uadk_crypto_pmd_close(struct rte_cryptodev *dev)
388 {
389 	struct uadk_crypto_priv *priv = dev->data->dev_private;
390 
391 	if (priv->env_cipher_init) {
392 		wd_cipher_env_uninit();
393 		priv->env_cipher_init = false;
394 	}
395 
396 	if (priv->env_auth_init) {
397 		wd_digest_env_uninit();
398 		priv->env_auth_init = false;
399 	}
400 
401 	return 0;
402 }
403 
404 /* Get device statistics */
405 static void
406 uadk_crypto_pmd_stats_get(struct rte_cryptodev *dev,
407 			  struct rte_cryptodev_stats *stats)
408 {
409 	int qp_id;
410 
411 	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
412 		struct uadk_qp *qp = dev->data->queue_pairs[qp_id];
413 
414 		stats->enqueued_count += qp->qp_stats.enqueued_count;
415 		stats->dequeued_count += qp->qp_stats.dequeued_count;
416 		stats->enqueue_err_count += qp->qp_stats.enqueue_err_count;
417 		stats->dequeue_err_count += qp->qp_stats.dequeue_err_count;
418 	}
419 }
420 
421 /* Reset device statistics */
422 static void
423 uadk_crypto_pmd_stats_reset(struct rte_cryptodev *dev __rte_unused)
424 {
425 	int qp_id;
426 
427 	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
428 		struct uadk_qp *qp = dev->data->queue_pairs[qp_id];
429 
430 		memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
431 	}
432 }
433 
434 /* Get device info */
435 static void
436 uadk_crypto_pmd_info_get(struct rte_cryptodev *dev,
437 			 struct rte_cryptodev_info *dev_info)
438 {
439 	struct uadk_crypto_priv *priv = dev->data->dev_private;
440 
441 	if (dev_info != NULL) {
442 		dev_info->driver_id = dev->driver_id;
443 		dev_info->driver_name = dev->device->driver->name;
444 		dev_info->max_nb_queue_pairs = priv->max_nb_qpairs;
445 		/* No limit of number of sessions */
446 		dev_info->sym.max_nb_sessions = 0;
447 		dev_info->feature_flags = dev->feature_flags;
448 
449 		if (priv->version == UADK_CRYPTO_V2)
450 			dev_info->capabilities = uadk_crypto_v2_capabilities;
451 	}
452 }
453 
454 /* Release queue pair */
455 static int
456 uadk_crypto_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
457 {
458 	struct uadk_qp *qp = dev->data->queue_pairs[qp_id];
459 
460 	if (qp) {
461 		rte_ring_free(qp->processed_pkts);
462 		rte_free(qp);
463 		dev->data->queue_pairs[qp_id] = NULL;
464 	}
465 
466 	return 0;
467 }
468 
469 /* set a unique name for the queue pair based on its name, dev_id and qp_id */
470 static int
471 uadk_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
472 			    struct uadk_qp *qp)
473 {
474 	unsigned int n = snprintf(qp->name, sizeof(qp->name),
475 				  "uadk_crypto_pmd_%u_qp_%u",
476 				  dev->data->dev_id, qp->id);
477 
478 	if (n >= sizeof(qp->name))
479 		return -EINVAL;
480 
481 	return 0;
482 }
483 
484 /* Create a ring to place process packets on */
485 static struct rte_ring *
486 uadk_pmd_qp_create_processed_pkts_ring(struct uadk_qp *qp,
487 				       unsigned int ring_size, int socket_id)
488 {
489 	struct rte_ring *r = qp->processed_pkts;
490 
491 	if (r) {
492 		if (rte_ring_get_size(r) >= ring_size) {
493 			UADK_LOG(INFO, "Reusing existing ring %s for processed packets",
494 				 qp->name);
495 			return r;
496 		}
497 
498 		UADK_LOG(ERR, "Unable to reuse existing ring %s for processed packets",
499 			 qp->name);
500 		return NULL;
501 	}
502 
503 	return rte_ring_create(qp->name, ring_size, socket_id,
504 			       RING_F_EXACT_SZ);
505 }
506 
507 static int
508 uadk_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
509 			 const struct rte_cryptodev_qp_conf *qp_conf,
510 			 int socket_id)
511 {
512 	struct uadk_qp *qp;
513 
514 	/* Free memory prior to re-allocation if needed. */
515 	if (dev->data->queue_pairs[qp_id] != NULL)
516 		uadk_crypto_pmd_qp_release(dev, qp_id);
517 
518 	/* Allocate the queue pair data structure. */
519 	qp = rte_zmalloc_socket("uadk PMD Queue Pair", sizeof(*qp),
520 				RTE_CACHE_LINE_SIZE, socket_id);
521 	if (qp == NULL)
522 		return (-ENOMEM);
523 
524 	qp->id = qp_id;
525 	dev->data->queue_pairs[qp_id] = qp;
526 
527 	if (uadk_pmd_qp_set_unique_name(dev, qp))
528 		goto qp_setup_cleanup;
529 
530 	qp->processed_pkts = uadk_pmd_qp_create_processed_pkts_ring(qp,
531 				qp_conf->nb_descriptors, socket_id);
532 	if (qp->processed_pkts == NULL)
533 		goto qp_setup_cleanup;
534 
535 	memset(&qp->qp_stats, 0, sizeof(qp->qp_stats));
536 
537 	return 0;
538 
539 qp_setup_cleanup:
540 	if (qp) {
541 		rte_free(qp);
542 		qp = NULL;
543 	}
544 	return -EINVAL;
545 }
546 
/* Return the size of the per-session private data this PMD needs. */
static unsigned int
uadk_crypto_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
	return sizeof(struct uadk_crypto_session);
}
552 
553 static enum uadk_chain_order
554 uadk_get_chain_order(const struct rte_crypto_sym_xform *xform)
555 {
556 	enum uadk_chain_order res = UADK_CHAIN_NOT_SUPPORTED;
557 
558 	if (xform != NULL) {
559 		if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
560 			if (xform->next == NULL)
561 				res = UADK_CHAIN_ONLY_AUTH;
562 			else if (xform->next->type ==
563 					RTE_CRYPTO_SYM_XFORM_CIPHER)
564 				res = UADK_CHAIN_AUTH_CIPHER;
565 		}
566 
567 		if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
568 			if (xform->next == NULL)
569 				res = UADK_CHAIN_ONLY_CIPHER;
570 			else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
571 				res = UADK_CHAIN_CIPHER_AUTH;
572 		}
573 	}
574 
575 	return res;
576 }
577 
/*
 * Set session cipher parameters: lazily initialise the uadk cipher
 * environment, map the DPDK cipher algo/mode to uadk, allocate a uadk
 * cipher session and program the key.
 * Returns 0 on success; -ENOTSUP for unmapped algorithms, -EINVAL on
 * env-init/alloc/set-key failure. On any failure after env init, the
 * cipher environment is uninitialised again.
 * NOTE(review): DES CBC is advertised in uadk_crypto_v2_capabilities but
 * has no case here, so session setup rejects it with -ENOTSUP — confirm
 * whether that is intended.
 */
static int
uadk_set_session_cipher_parameters(struct rte_cryptodev *dev,
				   struct uadk_crypto_session *sess,
				   struct rte_crypto_sym_xform *xform)
{
	struct uadk_crypto_priv *priv = dev->data->dev_private;
	struct rte_crypto_cipher_xform *cipher = &xform->cipher;
	struct wd_cipher_sess_setup setup = {0};
	struct sched_params params = {0};
	int ret;

	/* One-time (per device) uadk cipher environment initialisation. */
	if (!priv->env_cipher_init) {
		ret = wd_cipher_env_init(NULL);
		if (ret < 0)
			return -EINVAL;
		priv->env_cipher_init = true;
	}

	sess->cipher.direction = cipher->op;
	sess->iv.offset = cipher->iv.offset;
	sess->iv.length = cipher->iv.length;

	switch (cipher->algo) {
	/* Cover supported cipher algorithms */
	/* NOTE(review): the out_bytes values below look like per-mode HW
	 * output granularities — confirm against the uadk documentation.
	 */
	case RTE_CRYPTO_CIPHER_AES_CTR:
		setup.alg = WD_CIPHER_AES;
		setup.mode = WD_CIPHER_CTR;
		sess->cipher.req.out_bytes = 64;
		break;
	case RTE_CRYPTO_CIPHER_AES_ECB:
		setup.alg = WD_CIPHER_AES;
		setup.mode = WD_CIPHER_ECB;
		sess->cipher.req.out_bytes = 16;
		break;
	case RTE_CRYPTO_CIPHER_AES_CBC:
		setup.alg = WD_CIPHER_AES;
		setup.mode = WD_CIPHER_CBC;
		if (cipher->key.length == 16)
			sess->cipher.req.out_bytes = 16;
		else
			sess->cipher.req.out_bytes = 64;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		setup.alg = WD_CIPHER_AES;
		setup.mode = WD_CIPHER_XTS;
		if (cipher->key.length == 16)
			sess->cipher.req.out_bytes = 32;
		else
			sess->cipher.req.out_bytes = 512;
		break;
	default:
		ret = -ENOTSUP;
		goto env_uninit;
	}

	params.numa_id = -1;	/* choose nearby numa node */
	setup.sched_param = &params;
	sess->handle_cipher = wd_cipher_alloc_sess(&setup);
	if (!sess->handle_cipher) {
		UADK_LOG(ERR, "uadk failed to alloc session!");
		ret = -EINVAL;
		goto env_uninit;
	}

	ret = wd_cipher_set_key(sess->handle_cipher, cipher->key.data, cipher->key.length);
	if (ret) {
		wd_cipher_free_sess(sess->handle_cipher);
		UADK_LOG(ERR, "uadk failed to set key!");
		ret = -EINVAL;
		goto env_uninit;
	}

	return 0;

env_uninit:
	/* Undo the lazy env init so the next session setup retries it. */
	wd_cipher_env_uninit();
	priv->env_cipher_init = false;
	return ret;
}
657 
658 /* Set session auth parameters */
659 static int
660 uadk_set_session_auth_parameters(struct rte_cryptodev *dev,
661 				 struct uadk_crypto_session *sess,
662 				 struct rte_crypto_sym_xform *xform)
663 {
664 	struct uadk_crypto_priv *priv = dev->data->dev_private;
665 	struct wd_digest_sess_setup setup = {0};
666 	struct sched_params params = {0};
667 	int ret;
668 
669 	if (!priv->env_auth_init) {
670 		ret = wd_digest_env_init(NULL);
671 		if (ret < 0)
672 			return -EINVAL;
673 		priv->env_auth_init = true;
674 	}
675 
676 	sess->auth.operation = xform->auth.op;
677 	sess->auth.digest_length = xform->auth.digest_length;
678 
679 	switch (xform->auth.algo) {
680 	case RTE_CRYPTO_AUTH_MD5:
681 	case RTE_CRYPTO_AUTH_MD5_HMAC:
682 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_MD5) ?
683 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
684 		setup.alg = WD_DIGEST_MD5;
685 		sess->auth.req.out_buf_bytes = 16;
686 		sess->auth.req.out_bytes = 16;
687 		break;
688 	case RTE_CRYPTO_AUTH_SHA1:
689 	case RTE_CRYPTO_AUTH_SHA1_HMAC:
690 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA1) ?
691 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
692 		setup.alg = WD_DIGEST_SHA1;
693 		sess->auth.req.out_buf_bytes = 20;
694 		sess->auth.req.out_bytes = 20;
695 		break;
696 	case RTE_CRYPTO_AUTH_SHA224:
697 	case RTE_CRYPTO_AUTH_SHA224_HMAC:
698 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA224) ?
699 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
700 		setup.alg = WD_DIGEST_SHA224;
701 		sess->auth.req.out_buf_bytes = 28;
702 		sess->auth.req.out_bytes = 28;
703 		break;
704 	case RTE_CRYPTO_AUTH_SHA256:
705 	case RTE_CRYPTO_AUTH_SHA256_HMAC:
706 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA256) ?
707 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
708 		setup.alg = WD_DIGEST_SHA256;
709 		sess->auth.req.out_buf_bytes = 32;
710 		sess->auth.req.out_bytes = 32;
711 		break;
712 	case RTE_CRYPTO_AUTH_SHA384:
713 	case RTE_CRYPTO_AUTH_SHA384_HMAC:
714 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA384) ?
715 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
716 		setup.alg = WD_DIGEST_SHA384;
717 		sess->auth.req.out_buf_bytes = 48;
718 		sess->auth.req.out_bytes = 48;
719 		break;
720 	case RTE_CRYPTO_AUTH_SHA512:
721 	case RTE_CRYPTO_AUTH_SHA512_HMAC:
722 		setup.mode = (xform->auth.algo == RTE_CRYPTO_AUTH_SHA512) ?
723 			     WD_DIGEST_NORMAL : WD_DIGEST_HMAC;
724 		setup.alg = WD_DIGEST_SHA512;
725 		sess->auth.req.out_buf_bytes = 64;
726 		sess->auth.req.out_bytes = 64;
727 		break;
728 	default:
729 		ret = -ENOTSUP;
730 		goto env_uninit;
731 	}
732 
733 	params.numa_id = -1;	/* choose nearby numa node */
734 	setup.sched_param = &params;
735 	sess->handle_digest = wd_digest_alloc_sess(&setup);
736 	if (!sess->handle_digest) {
737 		UADK_LOG(ERR, "uadk failed to alloc session!");
738 		ret = -EINVAL;
739 		goto env_uninit;
740 	}
741 
742 	/* if mode is HMAC, should set key */
743 	if (setup.mode == WD_DIGEST_HMAC) {
744 		ret = wd_digest_set_key(sess->handle_digest,
745 					xform->auth.key.data,
746 					xform->auth.key.length);
747 		if (ret) {
748 			UADK_LOG(ERR, "uadk failed to alloc session!");
749 			wd_digest_free_sess(sess->handle_digest);
750 			sess->handle_digest = 0;
751 			ret = -EINVAL;
752 			goto env_uninit;
753 		}
754 	}
755 
756 	return 0;
757 
758 env_uninit:
759 	wd_digest_env_uninit();
760 	priv->env_auth_init = false;
761 	return ret;
762 }
763 
764 static int
765 uadk_crypto_sym_session_configure(struct rte_cryptodev *dev,
766 				  struct rte_crypto_sym_xform *xform,
767 				  struct rte_cryptodev_sym_session *session)
768 {
769 	struct rte_crypto_sym_xform *cipher_xform = NULL;
770 	struct rte_crypto_sym_xform *auth_xform = NULL;
771 	struct uadk_crypto_session *sess = CRYPTODEV_GET_SYM_SESS_PRIV(session);
772 	int ret;
773 
774 	if (unlikely(!sess)) {
775 		UADK_LOG(ERR, "Session not available");
776 		return -EINVAL;
777 	}
778 
779 	sess->chain_order = uadk_get_chain_order(xform);
780 	switch (sess->chain_order) {
781 	case UADK_CHAIN_ONLY_CIPHER:
782 		cipher_xform = xform;
783 		break;
784 	case UADK_CHAIN_ONLY_AUTH:
785 		auth_xform = xform;
786 		break;
787 	case UADK_CHAIN_CIPHER_AUTH:
788 		cipher_xform = xform;
789 		auth_xform = xform->next;
790 		break;
791 	case UADK_CHAIN_AUTH_CIPHER:
792 		auth_xform = xform;
793 		cipher_xform = xform->next;
794 		break;
795 	default:
796 		return -ENOTSUP;
797 	}
798 
799 	if (cipher_xform) {
800 		ret = uadk_set_session_cipher_parameters(dev, sess, cipher_xform);
801 		if (ret != 0) {
802 			UADK_LOG(ERR,
803 				"Invalid/unsupported cipher parameters");
804 			return ret;
805 		}
806 	}
807 
808 	if (auth_xform) {
809 		ret = uadk_set_session_auth_parameters(dev, sess, auth_xform);
810 		if (ret != 0) {
811 			UADK_LOG(ERR,
812 				"Invalid/unsupported auth parameters");
813 			return ret;
814 		}
815 	}
816 
817 	return 0;
818 }
819 
820 static void
821 uadk_crypto_sym_session_clear(struct rte_cryptodev *dev __rte_unused,
822 			      struct rte_cryptodev_sym_session *session)
823 {
824 	struct uadk_crypto_session *sess = CRYPTODEV_GET_SYM_SESS_PRIV(session);
825 
826 	if (unlikely(sess == NULL)) {
827 		UADK_LOG(ERR, "Session not available");
828 		return;
829 	}
830 
831 	if (sess->handle_cipher) {
832 		wd_cipher_free_sess(sess->handle_cipher);
833 		sess->handle_cipher = 0;
834 	}
835 
836 	if (sess->handle_digest) {
837 		wd_digest_free_sess(sess->handle_digest);
838 		sess->handle_digest = 0;
839 	}
840 }
841 
/* cryptodev operation table wired into the device at probe time */
static struct rte_cryptodev_ops uadk_crypto_pmd_ops = {
		.dev_configure		= uadk_crypto_pmd_config,
		.dev_start		= uadk_crypto_pmd_start,
		.dev_stop		= uadk_crypto_pmd_stop,
		.dev_close		= uadk_crypto_pmd_close,
		.stats_get		= uadk_crypto_pmd_stats_get,
		.stats_reset		= uadk_crypto_pmd_stats_reset,
		.dev_infos_get		= uadk_crypto_pmd_info_get,
		.queue_pair_setup	= uadk_crypto_pmd_qp_setup,
		.queue_pair_release	= uadk_crypto_pmd_qp_release,
		.sym_session_get_size	= uadk_crypto_sym_session_get_size,
		.sym_session_configure	= uadk_crypto_sym_session_configure,
		.sym_session_clear	= uadk_crypto_sym_session_clear,
};
856 
857 static void
858 uadk_process_cipher_op(struct rte_crypto_op *op,
859 		       struct uadk_crypto_session *sess,
860 		       struct rte_mbuf *msrc, struct rte_mbuf *mdst)
861 {
862 	uint32_t off = op->sym->cipher.data.offset;
863 	int ret;
864 
865 	if (!sess) {
866 		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
867 		return;
868 	}
869 
870 	sess->cipher.req.src = rte_pktmbuf_mtod_offset(msrc, uint8_t *, off);
871 	sess->cipher.req.in_bytes = op->sym->cipher.data.length;
872 	sess->cipher.req.dst = rte_pktmbuf_mtod_offset(mdst, uint8_t *, off);
873 	sess->cipher.req.out_buf_bytes = sess->cipher.req.in_bytes;
874 	sess->cipher.req.iv_bytes = sess->iv.length;
875 	sess->cipher.req.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
876 							sess->iv.offset);
877 	if (sess->cipher.direction == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
878 		sess->cipher.req.op_type = WD_CIPHER_ENCRYPTION;
879 	else
880 		sess->cipher.req.op_type = WD_CIPHER_DECRYPTION;
881 
882 	do {
883 		ret = wd_do_cipher_sync(sess->handle_cipher, &sess->cipher.req);
884 	} while (ret == -WD_EBUSY);
885 
886 	if (ret)
887 		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
888 }
889 
890 static void
891 uadk_process_auth_op(struct uadk_qp *qp, struct rte_crypto_op *op,
892 		     struct uadk_crypto_session *sess,
893 		     struct rte_mbuf *msrc, struct rte_mbuf *mdst)
894 {
895 	uint32_t srclen = op->sym->auth.data.length;
896 	uint32_t off = op->sym->auth.data.offset;
897 	uint8_t *dst = qp->temp_digest;
898 	int ret;
899 
900 	if (!sess) {
901 		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
902 		return;
903 	}
904 
905 	sess->auth.req.in = rte_pktmbuf_mtod_offset(msrc, uint8_t *, off);
906 	sess->auth.req.in_bytes = srclen;
907 	sess->auth.req.out = dst;
908 
909 	do {
910 		ret = wd_do_digest_sync(sess->handle_digest, &sess->auth.req);
911 	} while (ret == -WD_EBUSY);
912 
913 	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
914 		if (memcmp(dst, op->sym->auth.digest.data,
915 				sess->auth.digest_length) != 0) {
916 			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
917 		}
918 	} else {
919 		uint8_t *auth_dst;
920 
921 		auth_dst = op->sym->auth.digest.data;
922 		if (auth_dst == NULL)
923 			auth_dst = rte_pktmbuf_mtod_offset(mdst, uint8_t *,
924 					op->sym->auth.data.offset +
925 					op->sym->auth.data.length);
926 		memcpy(auth_dst, dst, sess->auth.digest_length);
927 	}
928 
929 	if (ret)
930 		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
931 }
932 
933 static uint16_t
934 uadk_crypto_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
935 			  uint16_t nb_ops)
936 {
937 	struct uadk_qp *qp = queue_pair;
938 	struct uadk_crypto_session *sess = NULL;
939 	struct rte_mbuf *msrc, *mdst;
940 	struct rte_crypto_op *op;
941 	uint16_t enqd = 0;
942 	int i, ret;
943 
944 	for (i = 0; i < nb_ops; i++) {
945 		op = ops[i];
946 		op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
947 		msrc = op->sym->m_src;
948 		mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;
949 
950 		if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
951 			if (likely(op->sym->session != NULL))
952 				sess = CRYPTODEV_GET_SYM_SESS_PRIV(
953 					op->sym->session);
954 		}
955 
956 		switch (sess->chain_order) {
957 		case UADK_CHAIN_ONLY_CIPHER:
958 			uadk_process_cipher_op(op, sess, msrc, mdst);
959 			break;
960 		case UADK_CHAIN_ONLY_AUTH:
961 			uadk_process_auth_op(qp, op, sess, msrc, mdst);
962 			break;
963 		case UADK_CHAIN_CIPHER_AUTH:
964 			uadk_process_cipher_op(op, sess, msrc, mdst);
965 			uadk_process_auth_op(qp, op, sess, mdst, mdst);
966 			break;
967 		case UADK_CHAIN_AUTH_CIPHER:
968 			uadk_process_auth_op(qp, op, sess, msrc, mdst);
969 			uadk_process_cipher_op(op, sess, msrc, mdst);
970 			break;
971 		default:
972 			op->status = RTE_CRYPTO_OP_STATUS_ERROR;
973 			break;
974 		}
975 
976 		if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
977 			op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
978 
979 		if (op->status != RTE_CRYPTO_OP_STATUS_ERROR) {
980 			ret = rte_ring_enqueue(qp->processed_pkts, (void *)op);
981 			if (ret < 0)
982 				goto enqueue_err;
983 			qp->qp_stats.enqueued_count++;
984 			enqd++;
985 		} else {
986 			/* increment count if failed to enqueue op */
987 			qp->qp_stats.enqueue_err_count++;
988 		}
989 	}
990 
991 	return enqd;
992 
993 enqueue_err:
994 	qp->qp_stats.enqueue_err_count++;
995 	return enqd;
996 }
997 
998 static uint16_t
999 uadk_crypto_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
1000 			  uint16_t nb_ops)
1001 {
1002 	struct uadk_qp *qp = queue_pair;
1003 	unsigned int nb_dequeued;
1004 
1005 	nb_dequeued = rte_ring_dequeue_burst(qp->processed_pkts,
1006 			(void **)ops, nb_ops, NULL);
1007 	qp->qp_stats.dequeued_count += nb_dequeued;
1008 
1009 	return nb_dequeued;
1010 }
1011 
1012 static int
1013 uadk_cryptodev_probe(struct rte_vdev_device *vdev)
1014 {
1015 	struct rte_cryptodev_pmd_init_params init_params = {
1016 		.name = "",
1017 		.private_data_size = sizeof(struct uadk_crypto_priv),
1018 		.max_nb_queue_pairs =
1019 				RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS,
1020 	};
1021 	enum uadk_crypto_version version = UADK_CRYPTO_V2;
1022 	struct uadk_crypto_priv *priv;
1023 	struct rte_cryptodev *dev;
1024 	struct uacce_dev *udev;
1025 	const char *input_args;
1026 	const char *name;
1027 
1028 	udev = wd_get_accel_dev("cipher");
1029 	if (!udev)
1030 		return -ENODEV;
1031 
1032 	if (!strcmp(udev->api, "hisi_qm_v2"))
1033 		version = UADK_CRYPTO_V2;
1034 
1035 	free(udev);
1036 
1037 	name = rte_vdev_device_name(vdev);
1038 	if (name == NULL)
1039 		return -EINVAL;
1040 
1041 	input_args = rte_vdev_device_args(vdev);
1042 	rte_cryptodev_pmd_parse_input_args(&init_params, input_args);
1043 
1044 	dev = rte_cryptodev_pmd_create(name, &vdev->device, &init_params);
1045 	if (dev == NULL) {
1046 		UADK_LOG(ERR, "driver %s: create failed", init_params.name);
1047 		return -ENODEV;
1048 	}
1049 
1050 	dev->dev_ops = &uadk_crypto_pmd_ops;
1051 	dev->driver_id = uadk_cryptodev_driver_id;
1052 	dev->dequeue_burst = uadk_crypto_dequeue_burst;
1053 	dev->enqueue_burst = uadk_crypto_enqueue_burst;
1054 	dev->feature_flags = RTE_CRYPTODEV_FF_HW_ACCELERATED |
1055 			     RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO;
1056 	priv = dev->data->dev_private;
1057 	priv->version = version;
1058 	priv->max_nb_qpairs = init_params.max_nb_queue_pairs;
1059 
1060 	rte_cryptodev_pmd_probing_finish(dev);
1061 
1062 	return 0;
1063 }
1064 
1065 static int
1066 uadk_cryptodev_remove(struct rte_vdev_device *vdev)
1067 {
1068 	struct rte_cryptodev *cryptodev;
1069 	const char *name;
1070 
1071 	name = rte_vdev_device_name(vdev);
1072 	if (name == NULL)
1073 		return -EINVAL;
1074 
1075 	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
1076 	if (cryptodev == NULL)
1077 		return -ENODEV;
1078 
1079 	return rte_cryptodev_pmd_destroy(cryptodev);
1080 }
1081 
/* vdev driver entry points for the crypto_uadk virtual device */
static struct rte_vdev_driver uadk_crypto_pmd = {
	.probe       = uadk_cryptodev_probe,
	.remove      = uadk_cryptodev_remove,
};

/* cryptodev driver handle used to allocate the driver id below */
static struct cryptodev_driver uadk_crypto_drv;

#define UADK_CRYPTO_DRIVER_NAME crypto_uadk
/* Register the vdev driver, crypto driver id, accepted devargs, logtype. */
RTE_PMD_REGISTER_VDEV(UADK_CRYPTO_DRIVER_NAME, uadk_crypto_pmd);
RTE_PMD_REGISTER_CRYPTO_DRIVER(uadk_crypto_drv, uadk_crypto_pmd.driver,
			       uadk_cryptodev_driver_id);
RTE_PMD_REGISTER_PARAM_STRING(UADK_CRYPTO_DRIVER_NAME,
			      "max_nb_queue_pairs=<int>");
RTE_LOG_REGISTER_DEFAULT(uadk_crypto_logtype, INFO);
1096