/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2017 Intel Corporation
 */

#ifndef _RTE_CRYPTO_H_
#define _RTE_CRYPTO_H_

/**
 * @file rte_crypto.h
 *
 * RTE Cryptography Common Definitions
 */

#ifdef __cplusplus
extern "C" {
#endif

#include <rte_mbuf.h>
#include <rte_memory.h>
#include <rte_mempool.h>
#include <rte_common.h>

#include "rte_crypto_sym.h"
#include "rte_crypto_asym.h"

/** Crypto operation types */
enum rte_crypto_op_type {
	RTE_CRYPTO_OP_TYPE_UNDEFINED,
	/**< Undefined operation type */
	RTE_CRYPTO_OP_TYPE_SYMMETRIC,
	/**< Symmetric operation */
	RTE_CRYPTO_OP_TYPE_ASYMMETRIC
	/**< Asymmetric operation */
};

/** Status of crypto operation */
enum rte_crypto_op_status {
	RTE_CRYPTO_OP_STATUS_SUCCESS,
	/**< Operation completed successfully */
	RTE_CRYPTO_OP_STATUS_NOT_PROCESSED,
	/**< Operation has not yet been processed by a crypto device */
	RTE_CRYPTO_OP_STATUS_AUTH_FAILED,
	/**< Authentication verification failed */
	RTE_CRYPTO_OP_STATUS_INVALID_SESSION,
	/**<
	 * Symmetric operation failed due to invalid session arguments, or if
	 * in session-less mode, failed to allocate private operation material.
	 */
	RTE_CRYPTO_OP_STATUS_INVALID_ARGS,
	/**< Operation failed due to invalid arguments in request */
	RTE_CRYPTO_OP_STATUS_ERROR,
	/**< Error while handling the operation */
};

/**
 * Crypto operation session type. This is used to specify whether a crypto
 * operation has a session structure attached for immutable parameters or if
 * all operation information is included in the operation data structure.
 */
enum rte_crypto_op_sess_type {
	RTE_CRYPTO_OP_WITH_SESSION,	/**< Session based crypto operation */
	RTE_CRYPTO_OP_SESSIONLESS,	/**< Session-less crypto operation */
	RTE_CRYPTO_OP_SECURITY_SESSION	/**< Security session crypto operation */
};

/* Auxiliary flags related to IPsec offload with RTE_SECURITY */

#define RTE_CRYPTO_OP_AUX_FLAGS_IPSEC_SOFT_EXPIRY (1 << 0)
/**< SA soft expiry limit has been reached */

/**
 * Cryptographic Operation.
 *
 * This structure contains data relating to performing cryptographic
 * operations. It is used to carry any operation supported by the cryptodev
 * API; PMDs should check the type parameter to verify that the operation is
 * supported by the device. Crypto operations are enqueued to and dequeued
 * from crypto PMDs using rte_cryptodev_enqueue_burst() /
 * rte_cryptodev_dequeue_burst().
 */
struct rte_crypto_op {
	__extension__
	union {
		uint64_t raw;
		__extension__
		struct {
			uint8_t type;
			/**< operation type */
			uint8_t status;
			/**<
			 * operation status - this is reset to
			 * RTE_CRYPTO_OP_STATUS_NOT_PROCESSED on allocation
			 * from mempool and will be set to
			 * RTE_CRYPTO_OP_STATUS_SUCCESS after the crypto
			 * operation is successfully processed by a crypto PMD
			 */
			uint8_t sess_type;
			/**< operation session type */
			uint8_t aux_flags;
			/**< Operation specific auxiliary/additional flags.
			 * These flags carry additional information from the
			 * operation. Processing of these flags is optional.
			 */
			uint8_t reserved[2];
			/**< Reserved bytes to fill 64 bits for
			 * future additions
			 */
			uint16_t private_data_offset;
			/**< Offset to indicate start of private data (if any).
			 * The offset is counted from the start of the
			 * rte_crypto_op including IV.
			 * The private data may be used by the application
			 * to store information which should remain untouched
			 * by the library/driver.
			 */
		};
	};
	struct rte_mempool *mempool;
	/**< crypto operation mempool from which the operation is allocated */

	rte_iova_t phys_addr;
	/**< physical address of crypto operation */

/* Empty structures do not have zero size in C++, leading to compilation
 * errors with clang about the structure/union having different sizes in C
 * and C++. While things are clearer with an explicit union, since each field
 * is zero-sized it is not actually needed, so omit it for C++.
 */
#ifndef __cplusplus
	__extension__
	union {
#endif
		struct rte_crypto_sym_op sym[0];
		/**< Symmetric operation parameters */

		struct rte_crypto_asym_op asym[0];
		/**< Asymmetric operation parameters */

#ifndef __cplusplus
	}; /**< operation specific parameters */
#endif
};
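
/*
 * Illustrative usage sketch (not part of the API): how an application might
 * inspect the status of operations dequeued from a crypto device. The
 * identifiers dev_id, qp_id, BURST_SIZE and handle_failure() are assumptions
 * made purely for this example.
 *
 *	struct rte_crypto_op *ops[BURST_SIZE];
 *	uint16_t i, nb;
 *
 *	nb = rte_cryptodev_dequeue_burst(dev_id, qp_id, ops, BURST_SIZE);
 *	for (i = 0; i < nb; i++) {
 *		if (ops[i]->status != RTE_CRYPTO_OP_STATUS_SUCCESS) {
 *			// e.g. RTE_CRYPTO_OP_STATUS_AUTH_FAILED on a digest
 *			// verification mismatch
 *			handle_failure(ops[i]);
 *		}
 *	}
 */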

/**
 * Reset the fields of a crypto operation to their default values.
 *
 * @param op	The crypto operation to be reset.
 * @param type	The crypto operation type.
 */
static inline void
__rte_crypto_op_reset(struct rte_crypto_op *op, enum rte_crypto_op_type type)
{
	op->type = type;
	op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
	op->sess_type = RTE_CRYPTO_OP_SESSIONLESS;

	switch (type) {
	case RTE_CRYPTO_OP_TYPE_SYMMETRIC:
		__rte_crypto_sym_op_reset(op->sym);
		break;
	case RTE_CRYPTO_OP_TYPE_ASYMMETRIC:
		memset(op->asym, 0, sizeof(struct rte_crypto_asym_op));
		break;
	case RTE_CRYPTO_OP_TYPE_UNDEFINED:
	default:
		break;
	}
}

/**
 * Private data structure belonging to a crypto symmetric operation pool.
 */
struct rte_crypto_op_pool_private {
	enum rte_crypto_op_type type;
	/**< Crypto operation type of the ops in the pool. */
	uint16_t priv_size;
	/**< Size of private area in each crypto operation. */
};

/**
 * Returns the size of private data allocated with each rte_crypto_op object
 * by the mempool.
 *
 * @param mempool	rte_crypto_op mempool
 *
 * @return private data size
 */
static inline uint16_t
__rte_crypto_op_get_priv_data_size(struct rte_mempool *mempool)
{
	struct rte_crypto_op_pool_private *priv =
		(struct rte_crypto_op_pool_private *)rte_mempool_get_priv(mempool);

	return priv->priv_size;
}

/**
 * Creates a crypto operation pool
 *
 * @param name		pool name
 * @param type		crypto operation type, use
 *			RTE_CRYPTO_OP_TYPE_UNDEFINED for a pool which
 *			supports all operation types
 * @param nb_elts	number of elements in pool
 * @param cache_size	Number of elements to cache on lcore, see
 *			*rte_mempool_create* for further details about
 *			cache size
 * @param priv_size	Size of private data to allocate with each
 *			operation
 * @param socket_id	Socket to allocate memory on
 *
 * @return
 * - On success pointer to mempool
 * - On failure NULL
 */
extern struct rte_mempool *
rte_crypto_op_pool_create(const char *name, enum rte_crypto_op_type type,
		unsigned nb_elts, unsigned cache_size, uint16_t priv_size,
		int socket_id);

/**
 * Bulk allocate raw elements from the mempool and return them as crypto
 * operations.
 *
 * @param mempool	crypto operation mempool.
 * @param type		crypto operation type.
 * @param ops		Array to place allocated crypto operations
 * @param nb_ops	Number of crypto operations to allocate
 *
 * @returns
 * - On success returns number of ops allocated
 * - 0 if the requested number of ops are not available
 * - -EINVAL if the requested type does not match the pool type
 */
static inline int
__rte_crypto_op_raw_bulk_alloc(struct rte_mempool *mempool,
		enum rte_crypto_op_type type,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	struct rte_crypto_op_pool_private *priv;

	priv = (struct rte_crypto_op_pool_private *)rte_mempool_get_priv(mempool);
	if (unlikely(priv->type != type &&
			priv->type != RTE_CRYPTO_OP_TYPE_UNDEFINED))
		return -EINVAL;

	if (rte_mempool_get_bulk(mempool, (void **)ops, nb_ops) == 0)
		return nb_ops;

	return 0;
}
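
/*
 * Illustrative usage sketch (not part of the API): creating a symmetric
 * operation pool and allocating a single operation from it. The pool name,
 * element count, cache size, private data size and the use of rte_socket_id()
 * are assumptions made purely for this example.
 *
 *	struct rte_mempool *op_pool;
 *	struct rte_crypto_op *op;
 *
 *	op_pool = rte_crypto_op_pool_create("crypto_op_pool",
 *			RTE_CRYPTO_OP_TYPE_SYMMETRIC,
 *			8192, 128,
 *			16,		// private area, e.g. for a per-op IV
 *			rte_socket_id());
 *	if (op_pool == NULL)
 *		rte_exit(EXIT_FAILURE, "Cannot create crypto op pool\n");
 *
 *	op = rte_crypto_op_alloc(op_pool, RTE_CRYPTO_OP_TYPE_SYMMETRIC);
 */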

/**
 * Allocate a crypto operation from a mempool with default parameters set
 *
 * @param mempool	crypto operation mempool
 * @param type		operation type to allocate
 *
 * @returns
 * - On success returns a valid rte_crypto_op structure
 * - On failure returns NULL
 */
static inline struct rte_crypto_op *
rte_crypto_op_alloc(struct rte_mempool *mempool, enum rte_crypto_op_type type)
{
	struct rte_crypto_op *op = NULL;
	int retval;

	retval = __rte_crypto_op_raw_bulk_alloc(mempool, type, &op, 1);
	if (unlikely(retval != 1))
		return NULL;

	__rte_crypto_op_reset(op, type);

	return op;
}

/**
 * Bulk allocate crypto operations from a mempool with default parameters set
 *
 * @param mempool	crypto operation mempool
 * @param type		operation type to allocate
 * @param ops		Array to place allocated crypto operations
 * @param nb_ops	Number of crypto operations to allocate
 *
 * @returns
 * - nb_ops if the number of operations requested were allocated.
 * - 0 if the requested number of ops are not available.
 *   None are allocated in this case.
 */
static inline unsigned
rte_crypto_op_bulk_alloc(struct rte_mempool *mempool,
		enum rte_crypto_op_type type,
		struct rte_crypto_op **ops, uint16_t nb_ops)
{
	int i;

	if (unlikely(__rte_crypto_op_raw_bulk_alloc(mempool, type, ops, nb_ops)
			!= nb_ops))
		return 0;

	for (i = 0; i < nb_ops; i++)
		__rte_crypto_op_reset(ops[i], type);

	return nb_ops;
}

/**
 * Returns a pointer to the private data of a crypto operation if
 * that operation has enough capacity for the requested size.
 *
 * @param op	crypto operation.
 * @param size	size of space requested in private data.
 *
 * @returns
 * - if sufficient space available returns pointer to start of private data
 * - if insufficient space returns NULL
 */
static inline void *
__rte_crypto_op_get_priv_data(struct rte_crypto_op *op, uint32_t size)
{
	uint32_t priv_size;

	if (likely(op->mempool != NULL)) {
		priv_size = __rte_crypto_op_get_priv_data_size(op->mempool);

		if (likely(priv_size >= size)) {
			if (op->type == RTE_CRYPTO_OP_TYPE_SYMMETRIC)
				return (void *)((uint8_t *)(op + 1) +
						sizeof(struct rte_crypto_sym_op));
			if (op->type == RTE_CRYPTO_OP_TYPE_ASYMMETRIC)
				return (void *)((uint8_t *)(op + 1) +
						sizeof(struct rte_crypto_asym_op));
		}
	}

	return NULL;
}

/**
 * Free a crypto operation structure.
 * If the operation has been allocated from an rte_mempool, the operation will
 * be returned to the mempool.
 *
 * @param op
 *   Pointer to symmetric crypto operation allocated with rte_crypto_op_alloc().
 *   If op is NULL, no operation is performed.
 */
static inline void
rte_crypto_op_free(struct rte_crypto_op *op)
{
	if (op != NULL && op->mempool != NULL)
		rte_mempool_put(op->mempool, op);
}

/**
 * Allocate a symmetric crypto operation in the private data of an mbuf.
 *
 * @param m	mbuf which is associated with the crypto operation; the
 *		operation will be allocated in the private data of that mbuf.
 *
 * @returns
 * - On success returns a pointer to the crypto operation.
 * - On failure returns NULL.
 */
static inline struct rte_crypto_op *
rte_crypto_sym_op_alloc_from_mbuf_priv_data(struct rte_mbuf *m)
{
	if (unlikely(m == NULL))
		return NULL;

	/*
	 * check that the mbuf's private data size is sufficient to contain a
	 * crypto operation
	 */
	if (unlikely(m->priv_size < (sizeof(struct rte_crypto_op) +
			sizeof(struct rte_crypto_sym_op))))
		return NULL;

	/* private data starts immediately after the mbuf header in the mbuf. */
	struct rte_crypto_op *op = (struct rte_crypto_op *)(m + 1);

	__rte_crypto_op_reset(op, RTE_CRYPTO_OP_TYPE_SYMMETRIC);

	op->mempool = NULL;
	op->sym->m_src = m;

	return op;
}

/**
 * Allocate space for symmetric crypto xforms in the private data space of the
 * crypto operation. This also defaults the crypto xform type and configures
 * the chaining of the xforms in the crypto operation.
 *
 * @param op		crypto operation, must be of type symmetric
 * @param nb_xforms	number of xforms to allocate and chain
 *
 * @return
 * - On success returns pointer to first crypto xform in crypto operations chain
 * - On failure returns NULL
 */
static inline struct rte_crypto_sym_xform *
rte_crypto_op_sym_xforms_alloc(struct rte_crypto_op *op, uint8_t nb_xforms)
{
	void *priv_data;
	uint32_t size;

	if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC))
		return NULL;

	size = sizeof(struct rte_crypto_sym_xform) * nb_xforms;

	priv_data = __rte_crypto_op_get_priv_data(op, size);
	if (priv_data == NULL)
		return NULL;

	return __rte_crypto_sym_op_sym_xforms_alloc(op->sym, priv_data,
			nb_xforms);
}

/**
 * Attach a session to a crypto operation
 *
 * @param op	crypto operation, must be of type symmetric
 * @param sess	cryptodev session
 *
 * @return
 * - 0 on success
 * - -1 if the operation is not of type symmetric
 */
static inline int
rte_crypto_op_attach_sym_session(struct rte_crypto_op *op,
		struct rte_cryptodev_sym_session *sess)
{
	if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_SYMMETRIC))
		return -1;

	op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;

	return __rte_crypto_sym_op_attach_sym_session(op->sym, sess);
}

/**
 * Attach an asymmetric session to a crypto operation
 *
 * @param op	crypto operation, must be of type asymmetric
 * @param sess	cryptodev session
 *
 * @return
 * - 0 on success
 * - -1 if the operation is not of type asymmetric
 */
static inline int
rte_crypto_op_attach_asym_session(struct rte_crypto_op *op,
		struct rte_cryptodev_asym_session *sess)
{
	if (unlikely(op->type != RTE_CRYPTO_OP_TYPE_ASYMMETRIC))
		return -1;

	op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
	op->asym->session = sess;
	return 0;
}
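
/*
 * Illustrative usage sketch (not part of the API): a typical session-based
 * symmetric flow built from the helpers above. The session sess, the mbuf m,
 * the cipher offsets and the dev_id/qp_id identifiers are assumptions made
 * purely for this example; session creation and device setup are handled by
 * the rte_cryptodev_* API and are not shown here.
 *
 *	struct rte_crypto_op *op =
 *		rte_crypto_op_alloc(op_pool, RTE_CRYPTO_OP_TYPE_SYMMETRIC);
 *	if (op == NULL)
 *		return;
 *
 *	rte_crypto_op_attach_sym_session(op, sess);
 *	op->sym->m_src = m;			// mbuf carrying the payload
 *	op->sym->cipher.data.offset = 0;
 *	op->sym->cipher.data.length = rte_pktmbuf_data_len(m);
 *
 *	if (rte_cryptodev_enqueue_burst(dev_id, qp_id, &op, 1) == 0)
 *		rte_crypto_op_free(op);		// not accepted, return to pool
 */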

#ifdef __cplusplus
}
#endif

#endif /* _RTE_CRYPTO_H_ */