/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.data.length = options->buffer_sz;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}

static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* auth parameters */
		sym_op->auth.data.length = options->buffer_sz;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}

static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.iv.data = test_vector->iv.data;
		sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
		sym_op->cipher.iv.length = test_vector->iv.length;

		sym_op->cipher.data.length = options->buffer_sz;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}

static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {

			uint32_t offset = options->buffer_sz;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				buf = bufs_in[i];

				tbuf = buf;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/* use the segment that holds the digest offset */
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
			sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
			sym_op->auth.aad.data = test_vector->aad.data;
			sym_op->auth.aad.length = options->auth_aad_sz;

		}

		sym_op->auth.data.length = options->buffer_sz;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}
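
/*
 * Digest placement for the generate path above (a descriptive note with a
 * made-up example; the sizes are not defaults of this tool): the digest is
 * expected at offset options->buffer_sz into the source data, so for an
 * in-place, chained mbuf the loop walks the segments until it reaches the
 * one containing that offset. For instance, with buffer_sz = 2048 and three
 * segments of data_len 1024, 1024 and 64, the walk subtracts 1024 twice and
 * stops at the third segment with a segment-relative offset of 0, and the
 * digest pointer and physical address are taken from that segment.
 */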

static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.iv.data = test_vector->iv.data;
		sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
		sym_op->cipher.iv.length = test_vector->iv.length;

		sym_op->cipher.data.length = options->buffer_sz;
		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {

			uint32_t offset = options->buffer_sz;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				buf = bufs_in[i];

				tbuf = buf;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/* use the segment that holds the digest offset */
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
			sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
			sym_op->auth.aad.data = test_vector->aad.data;
			sym_op->auth.aad.length = options->auth_aad_sz;
		}

		sym_op->auth.data.length = options->buffer_sz;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}

static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.iv.data = test_vector->iv.data;
		sym_op->cipher.iv.phys_addr = test_vector->iv.phys_addr;
		sym_op->cipher.iv.length = test_vector->iv.length;

		sym_op->cipher.data.length = options->buffer_sz;
		sym_op->cipher.data.offset =
				RTE_ALIGN_CEIL(options->auth_aad_sz, 16);

		sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
		sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);
		sym_op->auth.aad.length = options->auth_aad_sz;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {

			uint32_t offset = sym_op->cipher.data.length +
					sym_op->cipher.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				buf = bufs_in[i];

				tbuf = buf;
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				/* use the segment that holds the digest offset */
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);

			sym_op->auth.digest.length = options->auth_digest_sz;
		}

		sym_op->auth.data.length = options->buffer_sz;
		sym_op->auth.data.offset = options->auth_aad_sz;
	}

	return 0;
}
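
/*
 * Per-op buffer layout assumed by cperf_set_ops_aead() above, derived from
 * the offsets it programs (the byte positions shown are just an example for
 * auth_aad_sz = 12 and buffer_sz = 1024, not fixed values):
 *
 *	0            12    16                 1040
 *	| AAD (12 B) | pad | plain/ciphertext | digest (auth_digest_sz B) |
 *
 * The AAD sits at the start of the source mbuf, the cipher region starts at
 * RTE_ALIGN_CEIL(auth_aad_sz, 16) = 16, and on the generate path the digest
 * is written right after the cipher region (cipher offset + cipher length).
 * Note that auth.data.offset is the unaligned auth_aad_sz, while the cipher
 * offset is the 16-byte aligned value.
 */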

static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->auth_digest_sz;
			auth_xform.auth.add_auth_data_length =
					options->auth_aad_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_AEAD) {

		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;

		/* cipher other than null */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth other than null */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->auth_digest_sz;
			auth_xform.auth.add_auth_data_length =
					options->auth_aad_sz;
			/* auth options for aes gcm */
			if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
					options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
				auth_xform.auth.key.length = 0;
				auth_xform.auth.key.data = NULL;
			} else { /* auth options for others */
				auth_xform.auth.key.length =
					test_vector->auth_key.length;
				auth_xform.auth.key.data =
					test_vector->auth_key.data;
			}
		}

		/* create crypto session for aes gcm */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
			if (options->cipher_op ==
					RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
				cipher_xform.next = &auth_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform);
			} else { /* decrypt */
				auth_xform.next = &cipher_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform);
			}
		} else { /* create crypto session for others */
			/* cipher then auth */
			if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
				cipher_xform.next = &auth_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform);
			} else { /* auth then cipher */
				auth_xform.next = &cipher_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform);
			}
		}
	}
	return sess;
}

int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD
			|| options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
				options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
			op_fns->populate_ops = cperf_set_ops_aead;
		else
			op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}

	return -1;
}
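
/*
 * Example of how these callbacks are typically driven (an illustrative
 * sketch only; the variable names, the queue pair id and the enqueue step
 * are assumptions about the surrounding test harness, not part of this
 * file's API):
 *
 *	struct cperf_op_fns fns;
 *	struct rte_cryptodev_sym_session *sess;
 *
 *	if (cperf_get_op_functions(opts, &fns) != 0)
 *		return -1;
 *	sess = fns.sess_create(dev_id, opts, t_vec);
 *	if (sess == NULL)
 *		return -1;
 *	fns.populate_ops(ops, bufs_in, bufs_out, nb_ops, sess, opts, t_vec);
 *	rte_cryptodev_enqueue_burst(dev_id, qp_id, ops, nb_ops);
 */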