/*
 *   BSD LICENSE
 *
 *   Copyright (C) Cavium, Inc. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium, Inc nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_bus_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static uint8_t cryptodev_driver_id;

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:	order of operations (cipher, auth) or (auth, cipher)
 * - direction:	encryption or decryption
 * - calg:	cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:	authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:	cipher key length, for example 128, 192 or 256 bits
 *
 * In order to quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array, is
 * indexed by the combined mode function parameters only (cipher algorithm,
 * authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer, instead of
 * traversing the arrays manually and comparing function parameters on each
 * loop iteration.
 *
 *                   +--+CRYPTO_FUNC
 *            +--+ENC|
 *      +--+CA|
 *      |     +--+DEC
 * ORDER|
 *      |     +--+ENC
 *      +--+AC|
 *            +--+DEC
 *
 */

/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:			max cipher ID number
 * CRYPTO_AUTH_MAX:			max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:		max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)		(ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC		RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC		RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC	RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt:	cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:	cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:	authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:	authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = aes128cbc_sha1_hmac,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = aes128cbc_sha256_hmac,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	NULL
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] = sha1_hmac_aes128cbc_dec,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] = sha256_hmac_aes128cbc_dec,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:	cipher first, authenticate after
 * crypto_auth_cipher:	authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
({									\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);			\
})
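
/*
 * Illustrative usage sketch (editor's note, not part of the driver):
 * for an AES-128-CBC + SHA1-HMAC session chained cipher-then-auth in the
 * encrypt direction, the lookup walks crypto_chain_order[order], then
 * indexes by cop into crypto_cipher_auth[] to reach crypto_op_ca_encrypt,
 * and finally resolves to aes128cbc_sha1_hmac:
 *
 *	crypto_func_t f = CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *			RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *			RTE_CRYPTO_CIPHER_AES_CBC,
 *			RTE_CRYPTO_AUTH_SHA1_HMAC, 128);
 *
 * The key length is passed as a plain number; KEYL() inside the macro
 * turns it into the ARMV8_CRYPTO_CIPHER_KEYLEN_128 index. Like the tables
 * above, this relies on the enum values matching the array layout.
 */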

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:			max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:		max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_enc,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = aes128_key_sched_dec,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:	keys for encryption
 * crypto_key_sched_decrypt:	keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
({									\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	((*ks_tbl)[(calg)][KEYL(keyl)]);				\
})
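
/*
 * Illustrative usage sketch (editor's note, not part of the driver):
 * the decrypt-direction key schedule for AES-128-CBC would be obtained
 * with
 *
 *	crypto_key_sched_t ks = CRYPTO_GET_KEY_SCHED(
 *			RTE_CRYPTO_CIPHER_OP_DECRYPT,
 *			RTE_CRYPTO_CIPHER_AES_CBC, 128);
 *
 * which resolves through crypto_key_sched_dir[] to aes128_key_sched_dec.
 */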

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{

	/*
	 * This driver currently covers only chained operations.
	 * Ignore cipher-only or authentication-only operations,
	 * as well as chains longer than 2 xform structures.
	 */
	if (xform->next == NULL || xform->next->next != NULL)
		return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
	}

	return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
				const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	/*
	 * XOR the key with the IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * A byte-by-byte operation may seem less efficient here, but in
	 * fact it is the opposite: the resulting ASM code is likely to
	 * operate on NEON registers (load the auth key to Qx, load
	 * IPAD/OPAD into multiple elements of Qy, then EOR 128 bits
	 * at once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA1_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha1_block_partial(NULL, sess->auth.hmac.i_key_pad,
		    partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = sha1_block_partial(NULL, sess->auth.hmac.o_key_pad,
		    partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero memory under key */
		memset(sess->auth.hmac.key, 0, SHA256_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * Will be used as initialization state for final HMAC.
		 */
		error = sha256_block_partial(NULL, sess->auth.hmac.i_key_pad,
		    partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = sha256_block_partial(NULL, sess->auth.hmac.o_key_pad,
		    partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

		break;
	default:
		break;
	}

	return 0;
}
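
/*
 * Background note (standard HMAC construction, RFC 2104):
 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)).
 * Because K ^ ipad and K ^ opad are each exactly one hash block long,
 * their compressed state can be computed once per session (the
 * *_block_partial() calls above) and reused as the initial state of
 * every per-packet HMAC, saving two block transforms per operation.
 */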

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
	}

	return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate the order of the chained operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	cop = sess->cipher.direction;
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv.length = 16;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -ENOTSUP;
	}

	/* Set the digest length */
	sess->auth.digest_length = auth_xform->auth.digest_length;

	/* Verify supported key lengths and extract proper algorithm */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -ENOTSUP;
	}

	if (unlikely(sess->crypto_func == NULL)) {
		/*
		 * If we get here, there must be a bug in the algorithm
		 * selection above. Nevertheless, keep this check to catch
		 * the bug immediately and to avoid a NULL pointer
		 * dereference during op processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -ENOTSUP;
	}

	/* Set IV offset */
	sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return ret;
		}
	} else {
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -ENOTSUP;
	}

	return 0;
}
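
/*
 * Illustrative sketch (editor's note, application side, not part of the
 * driver): a chain accepted by the parser above is a cipher xform linked
 * to an auth xform, e.g. AES-128-CBC encryption followed by SHA1-HMAC
 * generation. The key buffers and IV offset below are placeholders:
 *
 *	struct rte_crypto_sym_xform auth_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *		.next = NULL,
 *		.auth = {
 *			.op = RTE_CRYPTO_AUTH_OP_GENERATE,
 *			.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
 *			.key = { .data = hmac_key, .length = 20 },
 *			.digest_length = 20,
 *		},
 *	};
 *	struct rte_crypto_sym_xform cipher_xf = {
 *		.type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *		.next = &auth_xf,
 *		.cipher = {
 *			.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT,
 *			.algo = RTE_CRYPTO_CIPHER_AES_CBC,
 *			.key = { .data = aes_key, .length = 16 },
 *			.iv = { .offset = IV_OFFSET, .length = 16 },
 *		},
 *	};
 *
 * Passing &cipher_xf to armv8_crypto_set_session_parameters() yields the
 * ARMV8_CRYPTO_CHAIN_CIPHER_AUTH order handled above.
 */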

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session *sess = NULL;

	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		/* get existing session */
		if (likely(op->sym->session != NULL)) {
			sess = (struct armv8_crypto_session *)
					get_session_private_data(
					op->sym->session,
					cryptodev_driver_id);
		}
	} else {
		/* provide internal session */
		void *_sess = NULL;
		void *_sess_private_data = NULL;

		if (rte_mempool_get(qp->sess_mp, (void **)&_sess))
			return NULL;

		if (rte_mempool_get(qp->sess_mp,
				(void **)&_sess_private_data)) {
			/* Return the session object to avoid leaking it */
			rte_mempool_put(qp->sess_mp, _sess);
			return NULL;
		}

		sess = (struct armv8_crypto_session *)_sess_private_data;

		if (unlikely(armv8_crypto_set_session_parameters(sess,
				op->sym->xform) != 0)) {
			rte_mempool_put(qp->sess_mp, _sess);
			rte_mempool_put(qp->sess_mp, _sess_private_data);
			sess = NULL;
		}
		op->sym->session = (struct rte_cryptodev_sym_session *)_sess;
		set_session_private_data(op->sym->session, cryptodev_driver_id,
			_sess_private_data);
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher and authentication) operation */
static inline void
process_armv8_chained_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess,
		struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
	crypto_func_t crypto_func;
	crypto_arg_t arg;
	struct rte_mbuf *m_asrc, *m_adst;
	uint8_t *csrc, *cdst;
	uint8_t *adst, *asrc;
	uint64_t clen, alen;
	int error;

	clen = op->sym->cipher.data.length;
	alen = op->sym->auth.data.length;

	csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
			op->sym->cipher.data.offset);
	cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
			op->sym->cipher.data.offset);

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		m_asrc = m_adst = mbuf_dst;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		m_asrc = mbuf_src;
		m_adst = mbuf_dst;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}
	asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
				op->sym->auth.data.offset);

	switch (sess->auth.mode) {
	case ARMV8_CRYPTO_AUTH_AS_AUTH:
		/* Nothing to do here, just verify correct option */
		break;
	case ARMV8_CRYPTO_AUTH_AS_HMAC:
		arg.digest.hmac.key = sess->auth.hmac.key;
		arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
		arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
		adst = op->sym->auth.digest.data;
		if (adst == NULL) {
			adst = rte_pktmbuf_mtod_offset(m_adst,
					uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		}
	} else {
		adst = qp->temp_digest;
	}

	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
					sess->cipher.iv.offset);
	arg.cipher.key = sess->cipher.key.data;
	/* Acquire combined mode function */
	crypto_func = sess->crypto_func;
	ARMV8_CRYPTO_ASSERT(crypto_func != NULL);
	error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
	if (error != 0) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(adst, op->sym->auth.digest.data,
				sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
	}
}

/** Process crypto operation for mbuf */
static inline int
process_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess)
{
	struct rte_mbuf *msrc, *mdst;

	msrc = op->sym->m_src;
	mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
		process_armv8_chained_op(qp, op, sess, msrc, mdst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		break;
	}

	/* Free session if a session-less crypto op */
	if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		memset(sess, 0, sizeof(struct armv8_crypto_session));
		memset(op->sym->session, 0,
				rte_cryptodev_get_header_session_size());
		rte_mempool_put(qp->sess_mp, sess);
		rte_mempool_put(qp->sess_mp, op->sym->session);
		op->sym->session = NULL;
	}

	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
		return -1;

	return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_session *sess;
	struct armv8_crypto_qp *qp = queue_pair;
	int i, retval;

	for (i = 0; i < nb_ops; i++) {
		sess = get_session(qp, ops[i]);
		if (unlikely(sess == NULL))
			goto enqueue_err;

		retval = process_op(qp, ops[i], sess);
		if (unlikely(retval < 0))
			goto enqueue_err;
	}

	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	qp->stats.enqueued_count += retval;

	return retval;

enqueue_err:
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	if (ops[i] != NULL)
		ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

	qp->stats.enqueue_err_count++;
	return retval;
}

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;

	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_cryptodev_pmd_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU support for the AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the SHA instruction sets */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the Advanced SIMD (NEON) instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	dev = rte_cryptodev_pmd_create(name, &vdev->device, init_params);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->driver_id = cryptodev_driver_id;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* Register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE;

	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;
	internals->max_nb_sessions = init_params->max_nb_sessions;

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev_pmd_init_params init_params = {
		"",
		sizeof(struct armv8_crypto_private),
		rte_socket_id(),
		RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS,
		RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_SESSIONS
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_pmd_parse_input_args(&init_params, input_args);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}
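
/*
 * Usage note (editor's note, illustrative): the PMD is instantiated as a
 * virtual device from the EAL command line, e.g. (assuming the
 * CRYPTODEV_NAME_ARMV8_PMD macro expands to "crypto_armv8"):
 *
 *	./app -l 1 --vdev "crypto_armv8,max_nb_queue_pairs=2,socket_id=0"
 *
 * The device arguments are parsed into init_params by
 * rte_cryptodev_pmd_parse_input_args() above.
 */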

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev *cryptodev;
	const char *name;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	RTE_LOG(INFO, PMD,
		"Closing ARMv8 crypto device %s on numa socket %u\n",
		name, rte_socket_id());

	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
	if (cryptodev == NULL)
		return -ENODEV;

	return rte_cryptodev_pmd_destroy(cryptodev);
}

static struct rte_vdev_driver armv8_crypto_pmd_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

static struct cryptodev_driver armv8_crypto_drv;

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_pmd_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"max_nb_sessions=<int> "
	"socket_id=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(armv8_crypto_drv, armv8_crypto_pmd_drv,
		cryptodev_driver_id);
883