/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Cavium, Inc
 */

#include <stdbool.h>

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <cryptodev_pmd.h>
#include <bus_vdev_driver.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "AArch64cryptolib.h"

#include "armv8_pmd_private.h"

static uint8_t cryptodev_driver_id;

static int cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev);

/**
 * Pointers to the supported combined mode crypto functions are stored
 * in the static tables. Each combined (chained) cryptographic operation
 * can be described by a set of numbers:
 * - order:	order of operations (cipher, auth) or (auth, cipher)
 * - direction:	encryption or decryption
 * - calg:	cipher algorithm such as AES_CBC, AES_CTR, etc.
 * - aalg:	authentication algorithm such as SHA1, SHA256, etc.
 * - keyl:	cipher key length, for example 128, 192, 256 bits
 *
 * To quickly acquire each function pointer based on those numbers,
 * a hierarchy of arrays is maintained. The final level, a 3D array,
 * is indexed by the combined mode function parameters only (cipher
 * algorithm, authentication algorithm and key length).
 *
 * This gives 3 memory accesses to obtain a function pointer instead of
 * traversing the array and comparing function parameters on each iteration.
 *
 *                   +--+CRYPTO_FUNC
 *            +--+ENC|
 *      +--+CA|
 *      |     +--+DEC
 * ORDER|
 *      |     +--+ENC
 *      +--+AC|
 *            +--+DEC
 *
 */

/**
 * 3D array type for ARM Combined Mode crypto function pointers.
 * CRYPTO_CIPHER_MAX:			max cipher ID number
 * CRYPTO_AUTH_MAX:			max auth ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:		max key length ID number
 */
typedef const crypto_func_t
crypto_func_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_AUTH_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

/* Evaluate to key length definition */
#define KEYL(keyl)		(ARMV8_CRYPTO_CIPHER_KEYLEN_ ## keyl)

/* Local aliases for supported ciphers */
#define CIPH_AES_CBC		RTE_CRYPTO_CIPHER_AES_CBC
/* Local aliases for supported hashes */
#define AUTH_SHA1_HMAC		RTE_CRYPTO_AUTH_SHA1_HMAC
#define AUTH_SHA256_HMAC	RTE_CRYPTO_AUTH_SHA256_HMAC

/**
 * Arrays containing pointers to particular cryptographic,
 * combined mode functions.
 * crypto_op_ca_encrypt:	cipher (encrypt), authenticate
 * crypto_op_ca_decrypt:	cipher (decrypt), authenticate
 * crypto_op_ac_encrypt:	authenticate, cipher (encrypt)
 * crypto_op_ac_decrypt:	authenticate, cipher (decrypt)
 */
static const crypto_func_tbl_t
crypto_op_ca_encrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] =
		armv8_enc_aes_cbc_sha1_128,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] =
		armv8_enc_aes_cbc_sha256_128,
};

static const crypto_func_tbl_t
crypto_op_ca_decrypt = {
	{ {NULL} }
};

static const crypto_func_tbl_t
crypto_op_ac_encrypt = {
	{ {NULL} }
};

static const crypto_func_tbl_t
crypto_op_ac_decrypt = {
	/* [cipher alg][auth alg][key length] = crypto_function, */
	[CIPH_AES_CBC][AUTH_SHA1_HMAC][KEYL(128)] =
		armv8_dec_aes_cbc_sha1_128,
	[CIPH_AES_CBC][AUTH_SHA256_HMAC][KEYL(128)] =
		armv8_dec_aes_cbc_sha256_128,
};

/**
 * Arrays containing pointers to particular cryptographic function sets,
 * covering given cipher operation directions (encrypt, decrypt)
 * for each order of cipher and authentication pairs.
 */
static const crypto_func_tbl_t *
crypto_cipher_auth[] = {
	&crypto_op_ca_encrypt,
	&crypto_op_ca_decrypt,
	NULL
};

static const crypto_func_tbl_t *
crypto_auth_cipher[] = {
	&crypto_op_ac_encrypt,
	&crypto_op_ac_decrypt,
	NULL
};

/**
 * Top level array containing pointers to particular cryptographic
 * function sets, covering given order of chained operations.
 * crypto_cipher_auth:	cipher first, authenticate after
 * crypto_auth_cipher:	authenticate first, cipher after
 */
static const crypto_func_tbl_t **
crypto_chain_order[] = {
	crypto_cipher_auth,
	crypto_auth_cipher,
	NULL
};

/**
 * Extract particular combined mode crypto function from the 3D array.
 */
#define CRYPTO_GET_ALGO(order, cop, calg, aalg, keyl)			\
__extension__ ({							\
	crypto_func_tbl_t *func_tbl =					\
				(crypto_chain_order[(order)])[(cop)];	\
									\
	((calg >= CRYPTO_CIPHER_MAX) || (aalg >= CRYPTO_AUTH_MAX)) ?	\
		NULL : ((*func_tbl)[(calg)][(aalg)][KEYL(keyl)]);	\
})
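
/*
 * Illustrative lookup (a sketch for readers; the driver performs this in
 * armv8_crypto_set_session_chained_parameters() below). For a
 * cipher-then-auth chain doing AES-CBC encryption with SHA1-HMAC and a
 * 128-bit key, the combined function is obtained roughly as:
 *
 *	crypto_func_t f = CRYPTO_GET_ALGO(ARMV8_CRYPTO_CHAIN_CIPHER_AUTH,
 *			ARMV8_CRYPTO_CIPHER_OP_ENCRYPT,
 *			RTE_CRYPTO_CIPHER_AES_CBC,
 *			RTE_CRYPTO_AUTH_SHA1_HMAC, 128);
 *
 * which, with the tables above, resolves to armv8_enc_aes_cbc_sha1_128.
 */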

/*----------------------------------------------------------------------------*/

/**
 * 2D array type for ARM key schedule function pointers.
 * CRYPTO_CIPHER_MAX:			max cipher ID number
 * CRYPTO_CIPHER_KEYLEN_MAX:		max key length ID number
 */
typedef const crypto_key_sched_t
crypto_key_sched_tbl_t[CRYPTO_CIPHER_MAX][CRYPTO_CIPHER_KEYLEN_MAX];

static const crypto_key_sched_tbl_t
crypto_key_sched_encrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = armv8_expandkeys_enc_aes_cbc_128,
};

static const crypto_key_sched_tbl_t
crypto_key_sched_decrypt = {
	/* [cipher alg][key length] = key_expand_func, */
	[CIPH_AES_CBC][KEYL(128)] = armv8_expandkeys_dec_aes_cbc_128,
};

/**
 * Top level array containing pointers to particular key generation
 * function sets, covering given operation direction.
 * crypto_key_sched_encrypt:	keys for encryption
 * crypto_key_sched_decrypt:	keys for decryption
 */
static const crypto_key_sched_tbl_t *
crypto_key_sched_dir[] = {
	&crypto_key_sched_encrypt,
	&crypto_key_sched_decrypt,
	NULL
};

/**
 * Extract particular key schedule function from the 2D array.
 */
#define CRYPTO_GET_KEY_SCHED(cop, calg, keyl)				\
__extension__ ({							\
	crypto_key_sched_tbl_t *ks_tbl = crypto_key_sched_dir[(cop)];	\
									\
	(calg >= CRYPTO_CIPHER_MAX) ?					\
		NULL : ((*ks_tbl)[(calg)][KEYL(keyl)]);			\
})
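
/*
 * Illustrative lookup (a sketch; the driver performs this during session
 * setup). The matching AES-CBC-128 encryption key expansion routine is
 * obtained roughly as:
 *
 *	crypto_key_sched_t ks = CRYPTO_GET_KEY_SCHED(
 *			ARMV8_CRYPTO_CIPHER_OP_ENCRYPT,
 *			RTE_CRYPTO_CIPHER_AES_CBC, 128);
 *
 * which resolves to armv8_expandkeys_enc_aes_cbc_128 from
 * crypto_key_sched_encrypt above.
 */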

/*----------------------------------------------------------------------------*/

/*
 *------------------------------------------------------------------------------
 * Session Prepare
 *------------------------------------------------------------------------------
 */

/** Get xform chain order */
static enum armv8_crypto_chain_order
armv8_crypto_get_chain_order(const struct rte_crypto_sym_xform *xform)
{

	/*
	 * This driver currently covers only chained operations.
	 * Reject cipher-only or authentication-only operations,
	 * as well as chains longer than 2 xform structures.
	 */
	if (xform->next == NULL || xform->next->next != NULL)
		return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return ARMV8_CRYPTO_CHAIN_AUTH_CIPHER;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return ARMV8_CRYPTO_CHAIN_CIPHER_AUTH;
	}

	return ARMV8_CRYPTO_CHAIN_NOT_SUPPORTED;
}

static inline void
auth_hmac_pad_prepare(struct armv8_crypto_session *sess,
				const struct rte_crypto_sym_xform *xform)
{
	size_t i;

	/* Generate i_key_pad and o_key_pad */
	memset(sess->auth.hmac.i_key_pad, 0, sizeof(sess->auth.hmac.i_key_pad));
	rte_memcpy(sess->auth.hmac.i_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	memset(sess->auth.hmac.o_key_pad, 0, sizeof(sess->auth.hmac.o_key_pad));
	rte_memcpy(sess->auth.hmac.o_key_pad, sess->auth.hmac.key,
							xform->auth.key.length);
	/*
	 * XOR key with IPAD/OPAD values to obtain i_key_pad
	 * and o_key_pad.
	 * A byte-by-byte operation may seem less efficient here,
	 * but in fact it is the opposite: the resulting ASM code is
	 * likely to operate on NEON registers (load the auth key into Qx,
	 * load IPAD/OPAD into multiple elements of Qy, eor 128 bits
	 * at once).
	 */
	for (i = 0; i < SHA_BLOCK_MAX; i++) {
		sess->auth.hmac.i_key_pad[i] ^= HMAC_IPAD_VALUE;
		sess->auth.hmac.o_key_pad[i] ^= HMAC_OPAD_VALUE;
	}
}
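
/*
 * Background: the standard HMAC construction (RFC 2104) is
 *
 *	HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m))
 *
 * The i_key_pad/o_key_pad buffers prepared above hold (K ^ ipad) and
 * (K ^ opad). auth_set_prerequisites() below hashes each pad into a
 * partial SHA state, so per-operation processing can resume the inner
 * and outer hashes from those precomputed states instead of
 * reprocessing the key block every time.
 */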

static inline int
auth_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	uint8_t partial[64] = { 0 };
	int error;

	switch (xform->auth.algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the session key memory */
		memset(sess->auth.hmac.key, 0, SHA1_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * They will be used as the initialization state for the
		 * final HMAC.
		 */
		error = armv8_sha1_block_partial(NULL,
				sess->auth.hmac.i_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA1_BLOCK_SIZE);

		error = armv8_sha1_block_partial(NULL,
				sess->auth.hmac.o_key_pad,
				partial, SHA1_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA1_BLOCK_SIZE);

		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
		/*
		 * Generate authentication key, i_key_pad and o_key_pad.
		 */
		/* Zero the session key memory */
		memset(sess->auth.hmac.key, 0, SHA256_BLOCK_SIZE);

		/*
		 * Now copy the given authentication key to the session
		 * key.
		 */
		rte_memcpy(sess->auth.hmac.key, xform->auth.key.data,
						xform->auth.key.length);

		/* Prepare HMAC padding: key|pattern */
		auth_hmac_pad_prepare(sess, xform);
		/*
		 * Calculate partial hash values for i_key_pad and o_key_pad.
		 * They will be used as the initialization state for the
		 * final HMAC.
		 */
		error = armv8_sha256_block_partial(NULL,
				sess->auth.hmac.i_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.i_key_pad, partial, SHA256_BLOCK_SIZE);

		error = armv8_sha256_block_partial(NULL,
				sess->auth.hmac.o_key_pad,
				partial, SHA256_BLOCK_SIZE);
		if (error != 0)
			return -1;
		memcpy(sess->auth.hmac.o_key_pad, partial, SHA256_BLOCK_SIZE);

		break;
	default:
		break;
	}

	return 0;
}

static inline int
cipher_set_prerequisites(struct armv8_crypto_session *sess,
			const struct rte_crypto_sym_xform *xform)
{
	crypto_key_sched_t cipher_key_sched;

	cipher_key_sched = sess->cipher.key_sched;
	if (likely(cipher_key_sched != NULL)) {
		/* Set up cipher session key */
		cipher_key_sched(sess->cipher.key.data, xform->cipher.key.data);
	}

	return 0;
}

static int
armv8_crypto_set_session_chained_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *cipher_xform,
		const struct rte_crypto_sym_xform *auth_xform)
{
	enum armv8_crypto_chain_order order;
	enum armv8_crypto_cipher_operation cop;
	enum rte_crypto_cipher_algorithm calg;
	enum rte_crypto_auth_algorithm aalg;

	/* Validate and prepare scratch order of combined operations */
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		order = sess->chain_order;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select cipher direction */
	sess->cipher.direction = cipher_xform->cipher.op;
	/* Select cipher key */
	sess->cipher.key.length = cipher_xform->cipher.key.length;
	/* Set cipher direction */
	switch (sess->cipher.direction) {
	case RTE_CRYPTO_CIPHER_OP_ENCRYPT:
		cop = ARMV8_CRYPTO_CIPHER_OP_ENCRYPT;
		break;
	case RTE_CRYPTO_CIPHER_OP_DECRYPT:
		cop = ARMV8_CRYPTO_CIPHER_OP_DECRYPT;
		break;
	default:
		return -ENOTSUP;
	}
	/* Set cipher algorithm */
	calg = cipher_xform->cipher.algo;

	/* Select cipher algo */
	switch (calg) {
	/* Cover supported cipher algorithms */
	case RTE_CRYPTO_CIPHER_AES_CBC:
		sess->cipher.algo = calg;
		/* IV len is always 16 bytes (block size) for AES CBC */
		sess->cipher.iv.length = 16;
		break;
	default:
		return -ENOTSUP;
	}
	/* Select auth generate/verify */
	sess->auth.operation = auth_xform->auth.op;

	/* Select auth algo */
	switch (auth_xform->auth.algo) {
	/* Cover supported hash algorithms */
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
	case RTE_CRYPTO_AUTH_SHA256_HMAC: /* Fall through */
		aalg = auth_xform->auth.algo;
		sess->auth.mode = ARMV8_CRYPTO_AUTH_AS_HMAC;
		break;
	default:
		return -ENOTSUP;
	}

	/* Set the digest length */
	sess->auth.digest_length = auth_xform->auth.digest_length;

	/* Verify supported key lengths and extract proper algorithm */
	switch (cipher_xform->cipher.key.length << 3) {
	case 128:
		sess->crypto_func =
				CRYPTO_GET_ALGO(order, cop, calg, aalg, 128);
		sess->cipher.key_sched =
				CRYPTO_GET_KEY_SCHED(cop, calg, 128);
		break;
	case 192:
	case 256:
		/* These key lengths are not supported yet */
	default: /* Fall through */
		sess->crypto_func = NULL;
		sess->cipher.key_sched = NULL;
		return -ENOTSUP;
	}

	if (unlikely(sess->crypto_func == NULL ||
		sess->cipher.key_sched == NULL)) {
		/*
		 * If we got here, there must be a bug in the algorithm
		 * selection above. Nevertheless, keep this check to catch
		 * the bug immediately and avoid a NULL pointer dereference
		 * during op processing.
		 */
		ARMV8_CRYPTO_LOG_ERR(
			"No appropriate crypto function for given parameters");
		return -EINVAL;
	}

	/* Set up cipher session prerequisites */
	if (cipher_set_prerequisites(sess, cipher_xform) != 0)
		return -EINVAL;

	/* Set up authentication session prerequisites */
	if (auth_set_prerequisites(sess, auth_xform) != 0)
		return -EINVAL;

	return 0;
}

/** Parse crypto xform chain and set private session parameters */
int
armv8_crypto_set_session_parameters(struct armv8_crypto_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	bool is_chained_op;
	int ret;

	/* Filter out spurious/broken requests */
	if (xform == NULL)
		return -EINVAL;

	sess->chain_order = armv8_crypto_get_chain_order(xform);
	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		cipher_xform = xform;
		auth_xform = xform->next;
		is_chained_op = true;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		auth_xform = xform;
		cipher_xform = xform->next;
		is_chained_op = true;
		break;
	default:
		is_chained_op = false;
		return -ENOTSUP;
	}

	/* Set IV offset */
	sess->cipher.iv.offset = cipher_xform->cipher.iv.offset;

	if (is_chained_op) {
		ret = armv8_crypto_set_session_chained_parameters(sess,
						cipher_xform, auth_xform);
		if (unlikely(ret != 0)) {
			ARMV8_CRYPTO_LOG_ERR(
			"Invalid/unsupported chained (cipher/auth) parameters");
			return ret;
		}
	} else {
		ARMV8_CRYPTO_LOG_ERR("Invalid/unsupported operation");
		return -ENOTSUP;
	}

	return 0;
}

/** Provide session for operation */
static inline struct armv8_crypto_session *
get_session(struct armv8_crypto_qp *qp, struct rte_crypto_op *op)
{
	struct armv8_crypto_session *sess = NULL;

	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		/* get existing session */
		if (likely(op->sym->session != NULL)) {
			sess = CRYPTODEV_GET_SYM_SESS_PRIV(op->sym->session);
		}
	} else {
		/* provide internal session */
		struct rte_cryptodev_sym_session *_sess = NULL;

		if (rte_mempool_get(qp->sess_mp, (void **)&_sess))
			return NULL;

		sess = (struct armv8_crypto_session *)_sess->driver_priv_data;

		if (unlikely(armv8_crypto_set_session_parameters(sess,
				op->sym->xform) != 0)) {
			rte_mempool_put(qp->sess_mp, _sess);
			sess = NULL;
		}
		op->sym->session = (struct rte_cryptodev_sym_session *)_sess;
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}

/*
 *------------------------------------------------------------------------------
 * Process Operations
 *------------------------------------------------------------------------------
 */

/*----------------------------------------------------------------------------*/

/** Process chained (cipher and auth) operation */
static inline void
process_armv8_chained_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess,
		struct rte_mbuf *mbuf_src, struct rte_mbuf *mbuf_dst)
{
	crypto_func_t crypto_func;
	armv8_cipher_digest_t arg;
	struct rte_mbuf *m_asrc, *m_adst;
	uint8_t *csrc, *cdst;
	uint8_t *adst, *asrc;
	uint64_t clen, alen;
	int error;

	clen = op->sym->cipher.data.length;
	alen = op->sym->auth.data.length;

	csrc = rte_pktmbuf_mtod_offset(mbuf_src, uint8_t *,
			op->sym->cipher.data.offset);
	cdst = rte_pktmbuf_mtod_offset(mbuf_dst, uint8_t *,
			op->sym->cipher.data.offset);

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
		m_asrc = m_adst = mbuf_dst;
		break;
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER:
		m_asrc = mbuf_src;
		m_adst = mbuf_dst;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}
	asrc = rte_pktmbuf_mtod_offset(m_asrc, uint8_t *,
				op->sym->auth.data.offset);

	switch (sess->auth.mode) {
	case ARMV8_CRYPTO_AUTH_AS_AUTH:
		/* Nothing to do here, just verify correct option */
		break;
	case ARMV8_CRYPTO_AUTH_AS_HMAC:
		arg.digest.hmac.key = sess->auth.hmac.key;
		arg.digest.hmac.i_key_pad = sess->auth.hmac.i_key_pad;
		arg.digest.hmac.o_key_pad = sess->auth.hmac.o_key_pad;
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_GENERATE) {
		adst = op->sym->auth.digest.data;
		if (adst == NULL) {
			adst = rte_pktmbuf_mtod_offset(m_adst,
					uint8_t *,
					op->sym->auth.data.offset +
					op->sym->auth.data.length);
		}
	} else {
		adst = qp->temp_digest;
	}

	arg.cipher.iv = rte_crypto_op_ctod_offset(op, uint8_t *,
					sess->cipher.iv.offset);
	arg.cipher.key = sess->cipher.key.data;
	/* Acquire combined mode function */
	crypto_func = sess->crypto_func;
	RTE_VERIFY(crypto_func != NULL);
	error = crypto_func(csrc, cdst, clen, asrc, adst, alen, &arg);
	if (error != 0) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		return;
	}

	op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
	if (sess->auth.operation == RTE_CRYPTO_AUTH_OP_VERIFY) {
		if (memcmp(adst, op->sym->auth.digest.data,
				sess->auth.digest_length) != 0) {
			op->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		}
	}
}

/** Process crypto operation for mbuf */
static inline int
process_op(struct armv8_crypto_qp *qp, struct rte_crypto_op *op,
		struct armv8_crypto_session *sess)
{
	struct rte_mbuf *msrc, *mdst;

	msrc = op->sym->m_src;
	mdst = op->sym->m_dst ? op->sym->m_dst : op->sym->m_src;

	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

	switch (sess->chain_order) {
	case ARMV8_CRYPTO_CHAIN_CIPHER_AUTH:
	case ARMV8_CRYPTO_CHAIN_AUTH_CIPHER: /* Fall through */
		process_armv8_chained_op(qp, op, sess, msrc, mdst);
		break;
	default:
		op->status = RTE_CRYPTO_OP_STATUS_ERROR;
		break;
	}

	/* Free session if a session-less crypto op */
	if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		memset(sess, 0, sizeof(struct armv8_crypto_session));
		rte_mempool_put(qp->sess_mp, op->sym->session);
		op->sym->session = NULL;
	}

	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	if (unlikely(op->status == RTE_CRYPTO_OP_STATUS_ERROR))
		return -1;

	return 0;
}

/*
 *------------------------------------------------------------------------------
 * PMD Framework
 *------------------------------------------------------------------------------
 */

/** Enqueue burst */
static uint16_t
armv8_crypto_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_session *sess;
	struct armv8_crypto_qp *qp = queue_pair;
	int i, retval;

	for (i = 0; i < nb_ops; i++) {
		sess = get_session(qp, ops[i]);
		if (unlikely(sess == NULL))
			goto enqueue_err;

		retval = process_op(qp, ops[i], sess);
		if (unlikely(retval < 0))
			goto enqueue_err;
	}

	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	qp->stats.enqueued_count += retval;

	return retval;

enqueue_err:
	retval = rte_ring_enqueue_burst(qp->processed_ops, (void *)ops, i,
			NULL);
	if (ops[i] != NULL)
		ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;

	qp->stats.enqueue_err_count++;
	return retval;
}
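
/*
 * Note: operations are processed synchronously in the enqueue path above;
 * the dequeue path below only drains the processed_ops ring.
 */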

/** Dequeue burst */
static uint16_t
armv8_crypto_pmd_dequeue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct armv8_crypto_qp *qp = queue_pair;

	unsigned int nb_dequeued = 0;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)ops, nb_ops, NULL);
	qp->stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}

/** Create ARMv8 crypto device */
static int
cryptodev_armv8_crypto_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_cryptodev_pmd_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct armv8_crypto_private *internals;

	/* Check CPU support for the AES instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_AES)) {
		ARMV8_CRYPTO_LOG_ERR(
			"AES instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the SHA instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA1) ||
	    !rte_cpu_get_flag_enabled(RTE_CPUFLAG_SHA2)) {
		ARMV8_CRYPTO_LOG_ERR(
			"SHA1/SHA2 instructions not supported by CPU");
		return -EFAULT;
	}

	/* Check CPU support for the Advanced SIMD instruction set */
	if (!rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON)) {
		ARMV8_CRYPTO_LOG_ERR(
			"Advanced SIMD instructions not supported by CPU");
		return -EFAULT;
	}

	dev = rte_cryptodev_pmd_create(name, &vdev->device, init_params);
	if (dev == NULL) {
		ARMV8_CRYPTO_LOG_ERR("failed to create cryptodev vdev");
		goto init_error;
	}

	dev->driver_id = cryptodev_driver_id;
	dev->dev_ops = rte_armv8_crypto_pmd_ops;

	/* register rx/tx burst functions for data path */
	dev->dequeue_burst = armv8_crypto_pmd_dequeue_burst;
	dev->enqueue_burst = armv8_crypto_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING |
			RTE_CRYPTODEV_FF_CPU_NEON |
			RTE_CRYPTODEV_FF_CPU_ARM_CE |
			RTE_CRYPTODEV_FF_SYM_SESSIONLESS;

	internals = dev->data->dev_private;

	internals->max_nb_qpairs = init_params->max_nb_queue_pairs;

	rte_cryptodev_pmd_probing_finish(dev);

	return 0;

init_error:
	ARMV8_CRYPTO_LOG_ERR(
		"driver %s: cryptodev_armv8_crypto_create failed",
		init_params->name);

	cryptodev_armv8_crypto_uninit(vdev);
	return -EFAULT;
}

/** Initialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_init(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev_pmd_init_params init_params = {
		"",
		sizeof(struct armv8_crypto_private),
		rte_socket_id(),
		RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);
	rte_cryptodev_pmd_parse_input_args(&init_params, input_args);

	return cryptodev_armv8_crypto_create(name, vdev, &init_params);
}

/** Uninitialise ARMv8 crypto device */
static int
cryptodev_armv8_crypto_uninit(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev *cryptodev;
	const char *name;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	ARMV8_CRYPTO_LOG_INFO(
		"Closing ARMv8 crypto device %s on numa socket %u",
		name, rte_socket_id());

	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
	if (cryptodev == NULL)
		return -ENODEV;

	return rte_cryptodev_pmd_destroy(cryptodev);
}

static struct rte_vdev_driver armv8_crypto_pmd_drv = {
	.probe = cryptodev_armv8_crypto_init,
	.remove = cryptodev_armv8_crypto_uninit
};

static struct cryptodev_driver armv8_crypto_drv;

RTE_LOG_REGISTER_DEFAULT(crypto_armv8_log_type, ERR);

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_ARMV8_PMD, armv8_crypto_pmd_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_ARMV8_PMD, cryptodev_armv8_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_ARMV8_PMD,
	"max_nb_queue_pairs=<int> "
	"socket_id=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(armv8_crypto_drv, armv8_crypto_pmd_drv.driver,
		cryptodev_driver_id);
863