/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#ifndef _RTE_CRYPTO_H_
#define _RTE_CRYPTO_H_

/**
 * @file rte_crypto.h
 *
 * RTE Cryptography Common Definitions
 *
 */

#ifdef __cplusplus
extern "C" {
#endif


#include <rte_mbuf.h>
#include <rte_memory.h>
#include <rte_mempool.h>
#include <rte_common.h>

#include "rte_crypto_sym.h"
#include "rte_crypto_asym.h"

/** Crypto operation types */
enum rte_crypto_op_type {
	RTE_CRYPTO_OP_TYPE_UNDEFINED,
	/**< Undefined operation type */
	RTE_CRYPTO_OP_TYPE_SYMMETRIC,
	/**< Symmetric operation */
	RTE_CRYPTO_OP_TYPE_ASYMMETRIC
	/**< Asymmetric operation */
};

/** Status of crypto operation */
enum rte_crypto_op_status {
	RTE_CRYPTO_OP_STATUS_SUCCESS,
	/**< Operation completed successfully */
	RTE_CRYPTO_OP_STATUS_NOT_PROCESSED,
	/**< Operation has not yet been processed by a crypto device */
	RTE_CRYPTO_OP_STATUS_AUTH_FAILED,
	/**< Authentication verification failed */
	RTE_CRYPTO_OP_STATUS_INVALID_SESSION,
	/**<
	 * Symmetric operation failed due to invalid session arguments, or if
	 * in session-less mode, failed to allocate private operation material.
	 */
	RTE_CRYPTO_OP_STATUS_INVALID_ARGS,
	/**< Operation failed due to invalid arguments in request */
	RTE_CRYPTO_OP_STATUS_ERROR,
	/**< Error occurred while handling the operation */
};

/**
 * Crypto operation session type. This is used to specify whether a crypto
 * operation has a session structure attached for immutable parameters or
 * whether all operation information is included in the operation data
 * structure.
 */
enum rte_crypto_op_sess_type {
	RTE_CRYPTO_OP_WITH_SESSION,	/**< Session based crypto operation */
	RTE_CRYPTO_OP_SESSIONLESS,	/**< Session-less crypto operation */
	RTE_CRYPTO_OP_SECURITY_SESSION	/**< Security session crypto operation */
};

/* Auxiliary flags related to IPsec offload with RTE_SECURITY */

#define RTE_CRYPTO_OP_AUX_FLAGS_IPSEC_SOFT_EXPIRY (1 << 0)
/**< SA soft expiry limit has been reached */

/**
 * Cryptographic Operation.
 *
 * This structure contains data relating to performing cryptographic
 * operations. It is used to carry any operation supported by the cryptodev
 * API; PMDs should check the type parameter to verify that the operation is
 * supported by the device. Crypto operations are enqueued to and dequeued
 * from crypto PMDs using rte_cryptodev_enqueue_burst() /
 * rte_cryptodev_dequeue_burst().
 */
struct rte_crypto_op {
	__extension__
	union {
		uint64_t raw;
		__extension__
		struct {
			uint8_t type;
			/**< operation type */
			uint8_t status;
			/**<
			 * operation status - this is reset to
			 * RTE_CRYPTO_OP_STATUS_NOT_PROCESSED on allocation
			 * from mempool and will be set to
			 * RTE_CRYPTO_OP_STATUS_SUCCESS after the operation
			 * is successfully processed by a crypto PMD
			 */
			uint8_t sess_type;
			/**< operation session type */
			uint8_t aux_flags;
			/**< Operation specific auxiliary/additional flags.
			 * These flags carry additional information about the
			 * operation; processing them is optional.
			 */
			uint8_t reserved[2];
			/**< Reserved bytes to fill 64 bits for
			 * future additions
			 */
			uint16_t private_data_offset;
			/**< Offset to indicate start of private data (if any).
			 * The offset is counted from the start of the
			 * rte_crypto_op, including the IV.
			 * The private data may be used by the application
			 * to store information which should remain untouched
			 * by the library/driver
			 */
		};
	};
	struct rte_mempool *mempool;
	/**< crypto operation mempool from which the operation is allocated */

	rte_iova_t phys_addr;
	/**< physical address of crypto operation */

	__extension__
	union {
		struct rte_crypto_sym_op sym[0];
		/**< Symmetric operation parameters */

		struct rte_crypto_asym_op asym[0];
		/**< Asymmetric operation parameters */

	}; /**< operation specific parameters */
};
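
/*
 * Typical usage (illustrative sketch, not a normative part of this file):
 * operations are allocated from a crypto op mempool, the type specific
 * parameters are filled in, and the operations are then submitted to and
 * retrieved from a configured device queue pair with the burst API mentioned
 * above. dev_id, qp_id, ops, nb_ops and handle_failure() below are assumed
 * to be provided by the application.
 *
 *	uint16_t nb_enq = rte_cryptodev_enqueue_burst(dev_id, qp_id,
 *			ops, nb_ops);
 *	...
 *	uint16_t nb_deq = rte_cryptodev_dequeue_burst(dev_id, qp_id,
 *			ops, nb_ops);
 *	for (i = 0; i < nb_deq; i++)
 *		if (ops[i]->status != RTE_CRYPTO_OP_STATUS_SUCCESS)
 *			handle_failure(ops[i]);
 */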

/**
 * Reset the fields of a crypto operation to their default values.
 *
 * @param	op	The crypto operation to be reset.
 * @param	type	The crypto operation type.
 */
static inline void
__rte_crypto_op_reset(struct rte_crypto_op *op, enum rte_crypto_op_type type)
{
	op->type = type;
	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	op->sess_type = RTE_CRYPTO_OP_SESSIONLESS;

	switch (type) {
	case RTE_CRYPTO_OP_TYPE_SYMMETRIC:
		__rte_crypto_sym_op_reset(op->sym);
		break;
	case RTE_CRYPTO_OP_TYPE_ASYMMETRIC:
		memset(op->asym, 0, sizeof(struct rte_crypto_asym_op));
		break;
	case RTE_CRYPTO_OP_TYPE_UNDEFINED:
	default:
		break;
	}
}

/**
 * Private data structure belonging to a crypto operation pool.
 */
struct rte_crypto_op_pool_private {
	enum rte_crypto_op_type type;
	/**< Type of the crypto operations in the pool. */
	uint16_t priv_size;
	/**< Size of private area in each crypto operation. */
};


/**
 * Returns the size of private data allocated with each rte_crypto_op object
 * by the mempool.
 *
 * @param	mempool	rte_crypto_op mempool
 *
 * @return	private data size
 */
static inline uint16_t
__rte_crypto_op_get_priv_data_size(struct rte_mempool *mempool)
{
	struct rte_crypto_op_pool_private *priv =
		(struct rte_crypto_op_pool_private *) rte_mempool_get_priv(mempool);

	return priv->priv_size;
}


/**
 * Creates a crypto operation pool
 *
 * @param	name		pool name
 * @param	type		crypto operation type, use
 *				RTE_CRYPTO_OP_TYPE_UNDEFINED for a pool which
 *				supports all operation types
 * @param	nb_elts		number of elements in pool
 * @param	cache_size	Number of elements to cache on lcore, see
 *				*rte_mempool_create* for further details about
 *				cache size
 * @param	priv_size	Size of private data to allocate with each
 *				operation
 * @param	socket_id	Socket to allocate memory on
 *
 * @return
 *  - On success pointer to mempool
 *  - On failure NULL
 */
extern struct rte_mempool *
rte_crypto_op_pool_create(const char *name, enum rte_crypto_op_type type,
		unsigned nb_elts, unsigned cache_size, uint16_t priv_size,
		int socket_id);
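
/*
 * Example (illustrative sketch): create a pool of 4096 symmetric crypto
 * operations with a per-lcore cache of 128 and 16 bytes of per-operation
 * private data. The pool name and sizes are arbitrary values chosen for
 * illustration only.
 *
 *	struct rte_mempool *op_pool = rte_crypto_op_pool_create("sym_op_pool",
 *			RTE_CRYPTO_OP_TYPE_SYMMETRIC, 4096, 128, 16,
 *			rte_socket_id());
 *	if (op_pool == NULL)
 *		rte_exit(EXIT_FAILURE, "Cannot create crypto op pool\n");
 */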

/**
 * Bulk allocate raw elements from the mempool and return them as crypto
 * operations
 *
 * @param	mempool		crypto operation mempool.
 * @param	type		crypto operation type.
 * @param	ops		Array to place allocated crypto operations
 * @param	nb_ops		Number of crypto operations to allocate
 *
 * @returns
 * - On success returns the number of ops allocated
 * - On failure returns 0 (no ops allocated) or -EINVAL if the requested type
 *   does not match the pool type
 */
static inline int
__rte_crypto_op_raw_bulk_alloc(struct rte_mempool *mempool,
		enum rte_crypto_op_type type,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	struct rte_crypto_op_pool_private *priv;

	priv = (struct rte_crypto_op_pool_private *) rte_mempool_get_priv(mempool);
	if (unlikely(priv->type != type &&
			priv->type != RTE_CRYPTO_OP_TYPE_UNDEFINED))
		return -EINVAL;

	if (rte_mempool_get_bulk(mempool, (void **)ops, nb_ops) == 0)
		return nb_ops;

	return 0;
}

/**
 * Allocate a crypto operation from a mempool with default parameters set
 *
 * @param	mempool	crypto operation mempool
 * @param	type	operation type to allocate
 *
 * @returns
 * - On success returns a valid rte_crypto_op structure
 * - On failure returns NULL
 */
static inline struct rte_crypto_op *
rte_crypto_op_alloc(struct rte_mempool *mempool, enum rte_crypto_op_type type)
{
	struct rte_crypto_op *op = NULL;
	int retval;

	retval = __rte_crypto_op_raw_bulk_alloc(mempool, type, &op, 1);
	if (unlikely(retval != 1))
		return NULL;

	__rte_crypto_op_reset(op, type);

	return op;
}
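
/*
 * Example (illustrative sketch): allocate one symmetric operation from a
 * previously created pool ("op_pool" is assumed to exist, see
 * rte_crypto_op_pool_create() above) and release it once processing is done.
 *
 *	struct rte_crypto_op *op = rte_crypto_op_alloc(op_pool,
 *			RTE_CRYPTO_OP_TYPE_SYMMETRIC);
 *	if (op == NULL)
 *		return -ENOMEM;
 *	... fill in op->sym, enqueue and dequeue the operation ...
 *	rte_crypto_op_free(op);
 */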


/**
 * Bulk allocate crypto operations from a mempool with default parameters set
 *
 * @param	mempool	crypto operation mempool
 * @param	type	operation type to allocate
 * @param	ops	Array to place allocated crypto operations
 * @param	nb_ops	Number of crypto operations to allocate
 *
 * @returns
 * - nb_ops if the number of operations requested were allocated.
 * - 0 if the requested number of ops are not available.
 *   None are allocated in this case.
 */
static inline unsigned
rte_crypto_op_bulk_alloc(struct rte_mempool *mempool,
		enum rte_crypto_op_type type,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	int i;

	if (unlikely(__rte_crypto_op_raw_bulk_alloc(mempool, type, ops, nb_ops)
			!= nb_ops))
		return 0;

	for (i = 0; i < nb_ops; i++)
		__rte_crypto_op_reset(ops[i], type);

	return nb_ops;
}
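
/*
 * Example (illustrative sketch): allocate a burst of operations in one call.
 * The allocation is all-or-nothing, so a return value of 0 means that no
 * operation was taken from the pool. "op_pool" is assumed to exist.
 *
 *	struct rte_crypto_op *burst[32];
 *
 *	if (rte_crypto_op_bulk_alloc(op_pool, RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 *			burst, 32) == 0)
 *		return -ENOMEM;
 */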

/**
 * Returns a pointer to the private data of a crypto operation if
 * that operation has enough capacity for the requested size.
 *
 * @param	op	crypto operation.
 * @param	size	size of space requested in private data.
 *
 * @returns
 * - if sufficient space available returns pointer to start of private data
 * - if insufficient space returns NULL
 */
static inline void *
__rte_crypto_op_get_priv_data(struct rte_crypto_op *op, uint32_t size)
{
	uint32_t priv_size;

	if (likely(op->mempool != NULL)) {
		priv_size = __rte_crypto_op_get_priv_data_size(op->mempool);

		if (likely(priv_size >= size)) {
			if (op->type == RTE_CRYPTO_OP_TYPE_SYMMETRIC)
				return (void *)((uint8_t *)(op + 1) +
					sizeof(struct rte_crypto_sym_op));
			if (op->type == RTE_CRYPTO_OP_TYPE_ASYMMETRIC)
				return (void *)((uint8_t *)(op + 1) +
					sizeof(struct rte_crypto_asym_op));
		}
	}

	return NULL;
}

/**
 * Free a crypto operation structure.
 * If the operation has been allocated from an rte_mempool, then the
 * operation will be returned to the mempool.
 *
 * @param	op	crypto operation
 */
static inline void
rte_crypto_op_free(struct rte_crypto_op *op)
{
	if (op != NULL && op->mempool != NULL)
		rte_mempool_put(op->mempool, op);
}

/**
 * Allocate a symmetric crypto operation in the private data of an mbuf.
 *
 * @param	m	mbuf which is associated with the crypto operation, the
 *			operation will be allocated in the private data of that
 *			mbuf.
 *
 * @returns
 * - On success returns a pointer to the crypto operation.
 * - On failure returns NULL.
 */
static inline struct rte_crypto_op *
rte_crypto_sym_op_alloc_from_mbuf_priv_data(struct rte_mbuf *m)
{
	if (unlikely(m == NULL))
		return NULL;

	/*
	 * check that the mbuf's private data size is sufficient to contain a
	 * crypto operation
	 */
	if (unlikely(m->priv_size < (sizeof(struct rte_crypto_op) +
			sizeof(struct rte_crypto_sym_op))))
		return NULL;

	/* private data starts immediately after the mbuf header in the mbuf. */
	struct rte_crypto_op *op = (struct rte_crypto_op *)(m + 1);

	__rte_crypto_op_reset(op, RTE_CRYPTO_OP_TYPE_SYMMETRIC);

	op->mempool = NULL;
	op->sym->m_src = m;

	return op;
}
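
/*
 * Example (illustrative sketch): for the allocation above to succeed, the
 * mbufs must come from a pool created with a private area large enough to
 * hold the operation. The pool name and sizes below are arbitrary; the
 * private size may additionally need rounding up to RTE_MBUF_PRIV_ALIGN.
 *
 *	uint16_t priv = sizeof(struct rte_crypto_op) +
 *			sizeof(struct rte_crypto_sym_op);
 *	struct rte_mempool *mb_pool = rte_pktmbuf_pool_create("mb_pool",
 *			8192, 256, priv, RTE_MBUF_DEFAULT_BUF_SIZE,
 *			rte_socket_id());
 *	...
 *	struct rte_crypto_op *op =
 *			rte_crypto_sym_op_alloc_from_mbuf_priv_data(m);
 */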

/**
 * Allocate space for symmetric crypto xforms in the private data space of the
 * crypto operation. This also sets the default crypto xform type and
 * configures the chaining of the xforms in the crypto operation.
 *
 * @param	op		crypto operation, must be of type symmetric
 * @param	nb_xforms	number of xforms to allocate and chain
 *
 * @return
 * - On success returns pointer to first crypto xform in crypto operations chain
 * - On failure returns NULL
 */
static inline struct rte_crypto_sym_xform *
rte_crypto_op_sym_xforms_alloc(struct rte_crypto_op *op, uint8_t nb_xforms)
{
	void *priv_data;
	uint32_t size;

	if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC))
		return NULL;

	size = sizeof(struct rte_crypto_sym_xform) * nb_xforms;

	priv_data = __rte_crypto_op_get_priv_data(op, size);
	if (priv_data == NULL)
		return NULL;

	return __rte_crypto_sym_op_sym_xforms_alloc(op->sym, priv_data,
			nb_xforms);
}
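
/*
 * Example (illustrative sketch): build a cipher-then-auth chain of two xforms
 * in the operation's private data. The op pool must have been created with a
 * priv_size of at least 2 * sizeof(struct rte_crypto_sym_xform); the chosen
 * algorithms are for illustration only and key/IV setup is omitted.
 *
 *	struct rte_crypto_sym_xform *xform =
 *			rte_crypto_op_sym_xforms_alloc(op, 2);
 *	if (xform == NULL)
 *		return -ENOMEM;
 *	xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
 *	xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC;
 *	xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
 *	xform->next->type = RTE_CRYPTO_SYM_XFORM_AUTH;
 *	xform->next->auth.algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
 *	xform->next->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
 */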

/**
 * Attach a session to a crypto operation
 *
 * @param	op	crypto operation, must be of type symmetric
 * @param	sess	cryptodev session
 *
 * @return
 * - 0 on success
 * - -1 if the operation is not of type symmetric
 */
static inline int
rte_crypto_op_attach_sym_session(struct rte_crypto_op *op,
		struct rte_cryptodev_sym_session *sess)
{
	if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC))
		return -1;

	op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;

	return __rte_crypto_sym_op_attach_sym_session(op->sym, sess);
}
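
/*
 * Example (illustrative sketch): attach a previously created and initialised
 * symmetric session to an operation. "sess" is assumed to have been set up
 * through the rte_cryptodev session API for the target device.
 *
 *	if (rte_crypto_op_attach_sym_session(op, sess) != 0)
 *		return -EINVAL;
 *	op->sess_type is now RTE_CRYPTO_OP_WITH_SESSION.
 */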

/**
 * Attach an asymmetric session to a crypto operation
 *
 * @param	op	crypto operation, must be of type asymmetric
 * @param	sess	cryptodev session
 *
 * @return
 * - 0 on success
 * - -1 if the operation is not of type asymmetric
 */
static inline int
rte_crypto_op_attach_asym_session(struct rte_crypto_op *op,
		struct rte_cryptodev_asym_session *sess)
{
	if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_ASYMMETRIC))
		return -1;

	op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
	op->asym->session = sess;
	return 0;
}

#ifdef __cplusplus
}
#endif

#endif /* _RTE_CRYPTO_H_ */