/*
 *   BSD LICENSE
 *
 *   Copyright (C) Cavium networks Ltd. 2017.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Cavium networks nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <string.h>

#include <rte_common.h>
#include <rte_malloc.h>
#include <rte_cryptodev_pmd.h>

#include "armv8_crypto_defs.h"

#include "rte_armv8_pmd_private.h"

static const struct rte_cryptodev_capabilities
	armv8_crypto_pmd_capabilities[] = {
	{	/* SHA1 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 16,
					.max = 128,
					.increment = 0
				},
				.digest_size = {
					.min = 20,
					.max = 20,
					.increment = 0
				},
				.aad_size = { 0 },
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA256 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 16,
					.max = 128,
					.increment = 0
				},
				.digest_size = {
					.min = 32,
					.max = 32,
					.increment = 0
				},
				.aad_size = { 0 },
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* AES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},

	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};


/** Configure device */
static int
armv8_crypto_pmd_config(__rte_unused struct rte_cryptodev *dev,
		__rte_unused struct rte_cryptodev_config *config)
{
	return 0;
}

/** Start device */
static int
armv8_crypto_pmd_start(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}

/** Stop device */
static void
armv8_crypto_pmd_stop(__rte_unused struct rte_cryptodev *dev)
{
}

/** Close device */
static int
armv8_crypto_pmd_close(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}


/** Get device statistics */
static void
armv8_crypto_pmd_stats_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_stats *stats)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

		stats->enqueued_count += qp->stats.enqueued_count;
		stats->dequeued_count += qp->stats.dequeued_count;

		stats->enqueue_err_count += qp->stats.enqueue_err_count;
		stats->dequeue_err_count += qp->stats.dequeue_err_count;
	}
}

/** Reset device statistics */
static void
armv8_crypto_pmd_stats_reset(struct rte_cryptodev *dev)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

		memset(&qp->stats, 0, sizeof(qp->stats));
	}
}


/** Get device info */
static void
armv8_crypto_pmd_info_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_info *dev_info)
{
	struct armv8_crypto_private *internals = dev->data->dev_private;

	if (dev_info != NULL) {
		dev_info->driver_id = dev->driver_id;
		dev_info->feature_flags = dev->feature_flags;
		dev_info->capabilities = armv8_crypto_pmd_capabilities;
		dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
		dev_info->sym.max_nb_sessions = internals->max_nb_sessions;
	}
}

/** Release queue pair */
static int
armv8_crypto_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
{
	if (dev->data->queue_pairs[qp_id] != NULL) {
		rte_free(dev->data->queue_pairs[qp_id]);
		dev->data->queue_pairs[qp_id] = NULL;
	}

	return 0;
}

/** Set a unique name for the queue pair based on its name, dev_id and qp_id */
static int
armv8_crypto_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
		struct armv8_crypto_qp *qp)
{
	unsigned int n;

	n = snprintf(qp->name, sizeof(qp->name), "armv8_crypto_pmd_%u_qp_%u",
			dev->data->dev_id, qp->id);

	/* A return value of sizeof(qp->name) or more means truncation. */
	if (n >= sizeof(qp->name))
		return -1;

	return 0;
}


/** Create a ring to place processed operations on */
static struct rte_ring *
armv8_crypto_pmd_qp_create_processed_ops_ring(struct armv8_crypto_qp *qp,
		unsigned int ring_size, int socket_id)
{
	struct rte_ring *r;

	r = rte_ring_lookup(qp->name);
	if (r) {
		if (rte_ring_get_size(r) >= ring_size) {
			ARMV8_CRYPTO_LOG_INFO(
				"Reusing existing ring %s for processed ops",
				qp->name);
			return r;
		}

		ARMV8_CRYPTO_LOG_ERR(
			"Unable to reuse existing ring %s for processed ops",
			qp->name);
		return NULL;
	}

	return rte_ring_create(qp->name, ring_size, socket_id,
			RING_F_SP_ENQ | RING_F_SC_DEQ);
}


/** Setup a queue pair */
static int
armv8_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
		const struct rte_cryptodev_qp_conf *qp_conf,
		int socket_id)
{
	struct armv8_crypto_qp *qp = NULL;

	/* Free memory prior to re-allocation if needed. */
	if (dev->data->queue_pairs[qp_id] != NULL)
		armv8_crypto_pmd_qp_release(dev, qp_id);

	/* Allocate the queue pair data structure. */
	qp = rte_zmalloc_socket("ARMv8 PMD Queue Pair", sizeof(*qp),
			RTE_CACHE_LINE_SIZE, socket_id);
	if (qp == NULL)
		return -ENOMEM;

	qp->id = qp_id;
	dev->data->queue_pairs[qp_id] = qp;

	if (armv8_crypto_pmd_qp_set_unique_name(dev, qp) != 0)
		goto qp_setup_cleanup;

	qp->processed_ops = armv8_crypto_pmd_qp_create_processed_ops_ring(qp,
			qp_conf->nb_descriptors, socket_id);
	if (qp->processed_ops == NULL)
		goto qp_setup_cleanup;

	qp->sess_mp = dev->data->session_pool;

	memset(&qp->stats, 0, sizeof(qp->stats));

	return 0;

qp_setup_cleanup:
	if (qp) {
		/* Do not leave a pointer to the freed queue pair behind. */
		dev->data->queue_pairs[qp_id] = NULL;
		rte_free(qp);
	}

	return -1;
}

/** Start queue pair */
static int
armv8_crypto_pmd_qp_start(__rte_unused struct rte_cryptodev *dev,
		__rte_unused uint16_t queue_pair_id)
{
	return -ENOTSUP;
}

/** Stop queue pair */
static int
armv8_crypto_pmd_qp_stop(__rte_unused struct rte_cryptodev *dev,
		__rte_unused uint16_t queue_pair_id)
{
	return -ENOTSUP;
}

/** Return the number of allocated queue pairs */
static uint32_t
armv8_crypto_pmd_qp_count(struct rte_cryptodev *dev)
{
	return dev->data->nb_queue_pairs;
}

/** Return the size of the session structure */
static unsigned
armv8_crypto_pmd_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
	return sizeof(struct armv8_crypto_session);
}

/** Configure the session from a crypto xform chain */
static int
armv8_crypto_pmd_session_configure(struct rte_cryptodev *dev,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess,
		struct rte_mempool *mempool)
{
	void *sess_private_data;

	if (unlikely(sess == NULL)) {
		ARMV8_CRYPTO_LOG_ERR("invalid session struct");
		return -1;
	}

	if (rte_mempool_get(mempool, &sess_private_data)) {
		CDEV_LOG_ERR(
			"Couldn't get object from session mempool");
		return -1;
	}

	if (armv8_crypto_set_session_parameters(sess_private_data,
			xform) != 0) {
		ARMV8_CRYPTO_LOG_ERR("failed to configure session parameters");

		/* Return session to mempool */
		rte_mempool_put(mempool, sess_private_data);
		return -1;
	}

	set_session_private_data(sess, dev->driver_id,
			sess_private_data);

	return 0;
}

/** Clear the memory of session so it doesn't leave key material behind */
static void
armv8_crypto_pmd_session_clear(struct rte_cryptodev *dev,
		struct rte_cryptodev_sym_session *sess)
{
	uint8_t index = dev->driver_id;
	void *sess_priv = get_session_private_data(sess, index);

	/* Zero out the whole structure */
	if (sess_priv) {
		memset(sess_priv, 0, sizeof(struct armv8_crypto_session));
		struct rte_mempool *sess_mp = rte_mempool_from_obj(sess_priv);

		set_session_private_data(sess, index, NULL);
		rte_mempool_put(sess_mp, sess_priv);
	}
}

struct rte_cryptodev_ops armv8_crypto_pmd_ops = {
	.dev_configure = armv8_crypto_pmd_config,
	.dev_start = armv8_crypto_pmd_start,
	.dev_stop = armv8_crypto_pmd_stop,
	.dev_close = armv8_crypto_pmd_close,

	.stats_get = armv8_crypto_pmd_stats_get,
	.stats_reset = armv8_crypto_pmd_stats_reset,

	.dev_infos_get = armv8_crypto_pmd_info_get,

	.queue_pair_setup = armv8_crypto_pmd_qp_setup,
	.queue_pair_release = armv8_crypto_pmd_qp_release,
	.queue_pair_start = armv8_crypto_pmd_qp_start,
	.queue_pair_stop = armv8_crypto_pmd_qp_stop,
	.queue_pair_count = armv8_crypto_pmd_qp_count,

	.session_get_size = armv8_crypto_pmd_session_get_size,
	.session_configure = armv8_crypto_pmd_session_configure,
	.session_clear = armv8_crypto_pmd_session_clear
};

struct rte_cryptodev_ops *rte_armv8_crypto_pmd_ops = &armv8_crypto_pmd_ops;