1 /* SPDX-License-Identifier: BSD-3-Clause 2 * Copyright(c) 2018 Intel Corporation. 3 * All rights reserved. 4 */ 5 6 #include "test.h" 7 #include <string.h> 8 #include <rte_common.h> 9 #include <rte_malloc.h> 10 #include <rte_mempool.h> 11 #include <rte_mbuf.h> 12 #include <rte_cryptodev.h> 13 14 #ifdef RTE_EXEC_ENV_WINDOWS 15 static int 16 test_event_crypto_adapter(void) 17 { 18 printf("event_crypto_adapter not supported on Windows, skipping test\n"); 19 return TEST_SKIPPED; 20 } 21 22 #else 23 24 #include <rte_eventdev.h> 25 #include <rte_bus_vdev.h> 26 #include <rte_service.h> 27 #include <rte_event_crypto_adapter.h> 28 29 #define PKT_TRACE 0 30 #define NUM 1 31 #define DEFAULT_NUM_XFORMS (2) 32 #define NUM_MBUFS (8191) 33 #define MBUF_CACHE_SIZE (256) 34 #define MAXIMUM_IV_LENGTH (16) 35 #define DEFAULT_NUM_OPS_INFLIGHT (128) 36 #define MAX_NB_SESSIONS 4 37 #define TEST_APP_PORT_ID 0 38 #define TEST_APP_EV_QUEUE_ID 0 39 #define TEST_APP_EV_PRIORITY 0 40 #define TEST_APP_EV_FLOWID 0xAABB 41 #define TEST_CRYPTO_EV_QUEUE_ID 1 42 #define TEST_ADAPTER_ID 0 43 #define TEST_CDEV_ID 0 44 #define TEST_CDEV_QP_ID 0 45 #define PACKET_LENGTH 64 46 #define NB_TEST_PORTS 1 47 #define NB_TEST_QUEUES 2 48 #define NUM_CORES 1 49 #define CRYPTODEV_NAME_NULL_PMD crypto_null 50 51 #define MBUF_SIZE (sizeof(struct rte_mbuf) + \ 52 RTE_PKTMBUF_HEADROOM + PACKET_LENGTH) 53 #define IV_OFFSET (sizeof(struct rte_crypto_op) + \ 54 sizeof(struct rte_crypto_sym_op) + \ 55 DEFAULT_NUM_XFORMS * \ 56 sizeof(struct rte_crypto_sym_xform)) 57 58 /* Handle log statements in same manner as test macros */ 59 #define LOG_DBG(...) RTE_LOG(DEBUG, EAL, __VA_ARGS__) 60 61 static const uint8_t text_64B[] = { 62 0x05, 0x15, 0x77, 0x32, 0xc9, 0x66, 0x91, 0x50, 63 0x93, 0x9f, 0xbb, 0x4e, 0x2e, 0x5a, 0x02, 0xd0, 64 0x2d, 0x9d, 0x31, 0x5d, 0xc8, 0x9e, 0x86, 0x36, 65 0x54, 0x5c, 0x50, 0xe8, 0x75, 0x54, 0x74, 0x5e, 66 0xd5, 0xa2, 0x84, 0x21, 0x2d, 0xc5, 0xf8, 0x1c, 67 0x55, 0x1a, 0xba, 0x91, 0xce, 0xb5, 0xa3, 0x1e, 68 0x31, 0xbf, 0xe9, 0xa1, 0x97, 0x5c, 0x2b, 0xd6, 69 0x57, 0xa5, 0x9f, 0xab, 0xbd, 0xb0, 0x9b, 0x9c 70 }; 71 #define DATA_SIZE 512 72 struct modex_test_data { 73 enum rte_crypto_asym_xform_type xform_type; 74 struct { 75 uint8_t data[DATA_SIZE]; 76 uint16_t len; 77 } base; 78 struct { 79 uint8_t data[DATA_SIZE]; 80 uint16_t len; 81 } exponent; 82 struct { 83 uint8_t data[DATA_SIZE]; 84 uint16_t len; 85 } modulus; 86 struct { 87 uint8_t data[DATA_SIZE]; 88 uint16_t len; 89 } reminder; 90 uint16_t result_len; 91 }; 92 93 static struct 94 modex_test_data modex_test_case = { 95 .xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX, 96 .base = { 97 .data = { 98 0xF8, 0xBA, 0x1A, 0x55, 0xD0, 0x2F, 0x85, 99 0xAE, 0x96, 0x7B, 0xB6, 0x2F, 0xB6, 0xCD, 100 0xA8, 0xEB, 0x7E, 0x78, 0xA0, 0x50 101 }, 102 .len = 20, 103 }, 104 .exponent = { 105 .data = { 106 0x01, 0x00, 0x01 107 }, 108 .len = 3, 109 }, 110 .reminder = { 111 .data = { 112 0x2C, 0x60, 0x75, 0x45, 0x98, 0x9D, 0xE0, 0x72, 113 0xA0, 0x9D, 0x3A, 0x9E, 0x03, 0x38, 0x73, 0x3C, 114 0x31, 0x83, 0x04, 0xFE, 0x75, 0x43, 0xE6, 0x17, 115 0x5C, 0x01, 0x29, 0x51, 0x69, 0x33, 0x62, 0x2D, 116 0x78, 0xBE, 0xAE, 0xC4, 0xBC, 0xDE, 0x7E, 0x2C, 117 0x77, 0x84, 0xF2, 0xC5, 0x14, 0xB5, 0x2F, 0xF7, 118 0xC5, 0x94, 0xEF, 0x86, 0x75, 0x75, 0xB5, 0x11, 119 0xE5, 0x0E, 0x0A, 0x29, 0x76, 0xE2, 0xEA, 0x32, 120 0x0E, 0x43, 0x77, 0x7E, 0x2C, 0x27, 0xAC, 0x3B, 121 0x86, 0xA5, 0xDB, 0xC9, 0x48, 0x40, 0xE8, 0x99, 122 0x9A, 0x0A, 0x3D, 0xD6, 0x74, 0xFA, 0x2E, 0x2E, 123 0x5B, 0xAF, 0x8C, 0x99, 0x44, 0x2A, 0x67, 0x38, 124 0x27, 0x41, 0x59, 0x9D, 0xB8, 0x51, 0xC9, 0xF7, 125 0x43, 0x61, 0x31, 0x6E, 0xF1, 0x25, 0x38, 0x7F, 126 0xAE, 0xC6, 0xD0, 0xBB, 0x29, 0x76, 0x3F, 0x46, 127 0x2E, 0x1B, 0xE4, 0x67, 0x71, 0xE3, 0x87, 0x5A 128 }, 129 .len = 128, 130 }, 131 .modulus = { 132 .data = { 133 0xb3, 0xa1, 0xaf, 0xb7, 0x13, 0x08, 0x00, 0x0a, 134 0x35, 0xdc, 0x2b, 0x20, 0x8d, 0xa1, 0xb5, 0xce, 135 0x47, 0x8a, 0xc3, 0x80, 0xf4, 0x7d, 0x4a, 0xa2, 136 0x62, 0xfd, 0x61, 0x7f, 0xb5, 0xa8, 0xde, 0x0a, 137 0x17, 0x97, 0xa0, 0xbf, 0xdf, 0x56, 0x5a, 0x3d, 138 0x51, 0x56, 0x4f, 0x70, 0x70, 0x3f, 0x63, 0x6a, 139 0x44, 0x5b, 0xad, 0x84, 0x0d, 0x3f, 0x27, 0x6e, 140 0x3b, 0x34, 0x91, 0x60, 0x14, 0xb9, 0xaa, 0x72, 141 0xfd, 0xa3, 0x64, 0xd2, 0x03, 0xa7, 0x53, 0x87, 142 0x9e, 0x88, 0x0b, 0xc1, 0x14, 0x93, 0x1a, 0x62, 143 0xff, 0xb1, 0x5d, 0x74, 0xcd, 0x59, 0x63, 0x18, 144 0x11, 0x3d, 0x4f, 0xba, 0x75, 0xd4, 0x33, 0x4e, 145 0x23, 0x6b, 0x7b, 0x57, 0x44, 0xe1, 0xd3, 0x03, 146 0x13, 0xa6, 0xf0, 0x8b, 0x60, 0xb0, 0x9e, 0xee, 147 0x75, 0x08, 0x9d, 0x71, 0x63, 0x13, 0xcb, 0xa6, 148 0x81, 0x92, 0x14, 0x03, 0x22, 0x2d, 0xde, 0x55 149 }, 150 .len = 128, 151 }, 152 .result_len = 128, 153 }; 154 155 struct event_crypto_adapter_test_params { 156 struct rte_mempool *mbuf_pool; 157 struct rte_mempool *op_mpool; 158 struct rte_mempool *asym_op_mpool; 159 struct rte_mempool *session_mpool; 160 struct rte_mempool *session_priv_mpool; 161 struct rte_mempool *asym_sess_mpool; 162 struct rte_cryptodev_config *config; 163 uint8_t crypto_event_port_id; 164 uint8_t internal_port_op_fwd; 165 }; 166 167 struct rte_event response_info = { 168 .queue_id = TEST_APP_EV_QUEUE_ID, 169 .sched_type = RTE_SCHED_TYPE_ATOMIC, 170 .flow_id = TEST_APP_EV_FLOWID, 171 .priority = TEST_APP_EV_PRIORITY 172 }; 173 174 struct rte_event_crypto_request request_info = { 175 .cdev_id = TEST_CDEV_ID, 176 .queue_pair_id = TEST_CDEV_QP_ID 177 }; 178 179 static struct event_crypto_adapter_test_params params; 180 static uint8_t crypto_adapter_setup_done; 181 static uint32_t slcore_id; 182 static int evdev; 183 184 static struct rte_mbuf * 185 alloc_fill_mbuf(struct rte_mempool *mpool, const uint8_t *data, 186 size_t len, uint8_t blocksize) 187 { 188 struct rte_mbuf *m = rte_pktmbuf_alloc(mpool); 189 size_t t_len = len - (blocksize ? (len % blocksize) : 0); 190 191 if (m) { 192 char *dst = rte_pktmbuf_append(m, t_len); 193 194 if (!dst) { 195 rte_pktmbuf_free(m); 196 return NULL; 197 } 198 199 rte_memcpy(dst, (const void *)data, t_len); 200 } 201 return m; 202 } 203 204 static int 205 send_recv_ev(struct rte_event *ev) 206 { 207 struct rte_crypto_op *op; 208 struct rte_event recv_ev; 209 int ret; 210 211 if (params.internal_port_op_fwd) 212 ret = rte_event_crypto_adapter_enqueue(evdev, TEST_APP_PORT_ID, 213 ev, NUM); 214 else 215 ret = rte_event_enqueue_burst(evdev, TEST_APP_PORT_ID, ev, NUM); 216 TEST_ASSERT_EQUAL(ret, NUM, "Failed to send event to crypto adapter\n"); 217 218 while (rte_event_dequeue_burst(evdev, 219 TEST_APP_PORT_ID, &recv_ev, NUM, 0) == 0) 220 rte_pause(); 221 222 op = recv_ev.event_ptr; 223 if (op->type == RTE_CRYPTO_OP_TYPE_SYMMETRIC) { 224 #if PKT_TRACE 225 struct rte_mbuf *m = op->sym->m_src; 226 rte_pktmbuf_dump(stdout, m, rte_pktmbuf_pkt_len(m)); 227 #endif 228 rte_pktmbuf_free(op->sym->m_src); 229 } else { 230 uint8_t *data_expected = NULL, *data_received = NULL; 231 uint32_t data_size; 232 233 data_expected = modex_test_case.reminder.data; 234 data_received = op->asym->modex.result.data; 235 data_size = op->asym->modex.result.length; 236 ret = memcmp(data_expected, data_received, data_size); 237 TEST_ASSERT_EQUAL(ret, 0, 238 "Data mismatch for asym crypto adapter\n"); 239 rte_free(op->asym->modex.result.data); 240 } 241 rte_crypto_op_free(op); 242 243 return TEST_SUCCESS; 244 } 245 246 static int 247 test_crypto_adapter_stats(void) 248 { 249 struct rte_event_crypto_adapter_stats stats; 250 251 rte_event_crypto_adapter_stats_get(TEST_ADAPTER_ID, &stats); 252 printf(" +------------------------------------------------------+\n"); 253 printf(" + Crypto adapter stats for instance %u:\n", TEST_ADAPTER_ID); 254 printf(" + Event port poll count %" PRIx64 "\n", 255 stats.event_poll_count); 256 printf(" + Event dequeue count %" PRIx64 "\n", 257 stats.event_deq_count); 258 printf(" + Cryptodev enqueue count %" PRIx64 "\n", 259 stats.crypto_enq_count); 260 printf(" + Cryptodev enqueue failed count %" PRIx64 "\n", 261 stats.crypto_enq_fail); 262 printf(" + Cryptodev dequeue count %" PRIx64 "\n", 263 stats.crypto_deq_count); 264 printf(" + Event enqueue count %" PRIx64 "\n", 265 stats.event_enq_count); 266 printf(" + Event enqueue retry count %" PRIx64 "\n", 267 stats.event_enq_retry_count); 268 printf(" + Event enqueue fail count %" PRIx64 "\n", 269 stats.event_enq_fail_count); 270 printf(" +------------------------------------------------------+\n"); 271 272 rte_event_crypto_adapter_stats_reset(TEST_ADAPTER_ID); 273 return TEST_SUCCESS; 274 } 275 276 static int 277 test_op_forward_mode(uint8_t session_less) 278 { 279 struct rte_crypto_sym_xform cipher_xform; 280 struct rte_cryptodev_sym_session *sess; 281 union rte_event_crypto_metadata m_data; 282 struct rte_crypto_sym_op *sym_op; 283 struct rte_crypto_op *op; 284 struct rte_mbuf *m; 285 struct rte_event ev; 286 uint32_t cap; 287 int ret; 288 289 memset(&m_data, 0, sizeof(m_data)); 290 291 m = alloc_fill_mbuf(params.mbuf_pool, text_64B, PACKET_LENGTH, 0); 292 TEST_ASSERT_NOT_NULL(m, "Failed to allocate mbuf!\n"); 293 #if PKT_TRACE 294 rte_pktmbuf_dump(stdout, m, rte_pktmbuf_pkt_len(m)); 295 #endif 296 /* Setup Cipher Parameters */ 297 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER; 298 cipher_xform.next = NULL; 299 cipher_xform.cipher.algo = RTE_CRYPTO_CIPHER_NULL; 300 cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT; 301 302 op = rte_crypto_op_alloc(params.op_mpool, 303 RTE_CRYPTO_OP_TYPE_SYMMETRIC); 304 TEST_ASSERT_NOT_NULL(op, 305 "Failed to allocate symmetric crypto operation struct\n"); 306 307 sym_op = op->sym; 308 309 if (!session_less) { 310 sess = rte_cryptodev_sym_session_create( 311 params.session_mpool); 312 TEST_ASSERT_NOT_NULL(sess, "Session creation failed\n"); 313 314 /* Create Crypto session*/ 315 ret = rte_cryptodev_sym_session_init(TEST_CDEV_ID, sess, 316 &cipher_xform, params.session_priv_mpool); 317 TEST_ASSERT_SUCCESS(ret, "Failed to init session\n"); 318 319 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, 320 &cap); 321 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 322 323 if (cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_SESSION_PRIVATE_DATA) { 324 /* Fill in private user data information */ 325 m_data.request_info.cdev_id = request_info.cdev_id; 326 m_data.request_info.queue_pair_id = 327 request_info.queue_pair_id; 328 m_data.response_info.event = response_info.event; 329 rte_cryptodev_session_event_mdata_set(TEST_CDEV_ID, 330 sess, RTE_CRYPTO_OP_TYPE_SYMMETRIC, 331 RTE_CRYPTO_OP_WITH_SESSION, 332 &m_data, sizeof(m_data)); 333 } 334 335 rte_crypto_op_attach_sym_session(op, sess); 336 } else { 337 struct rte_crypto_sym_xform *first_xform; 338 339 rte_crypto_op_sym_xforms_alloc(op, NUM); 340 op->sess_type = RTE_CRYPTO_OP_SESSIONLESS; 341 first_xform = &cipher_xform; 342 sym_op->xform = first_xform; 343 uint32_t len = IV_OFFSET + MAXIMUM_IV_LENGTH; 344 op->private_data_offset = len; 345 /* Fill in private data information */ 346 m_data.request_info.cdev_id = request_info.cdev_id; 347 m_data.request_info.queue_pair_id = request_info.queue_pair_id; 348 m_data.response_info.event = response_info.event; 349 rte_memcpy((uint8_t *)op + len, &m_data, sizeof(m_data)); 350 } 351 352 sym_op->m_src = m; 353 sym_op->cipher.data.offset = 0; 354 sym_op->cipher.data.length = PACKET_LENGTH; 355 356 /* Fill in event info and update event_ptr with rte_crypto_op */ 357 memset(&ev, 0, sizeof(ev)); 358 ev.queue_id = TEST_CRYPTO_EV_QUEUE_ID; 359 ev.sched_type = RTE_SCHED_TYPE_ATOMIC; 360 ev.flow_id = 0xAABB; 361 ev.event_ptr = op; 362 363 ret = send_recv_ev(&ev); 364 TEST_ASSERT_SUCCESS(ret, "Failed to send/receive event to " 365 "crypto adapter\n"); 366 367 test_crypto_adapter_stats(); 368 369 return TEST_SUCCESS; 370 } 371 372 static int 373 map_adapter_service_core(void) 374 { 375 uint32_t adapter_service_id; 376 int ret; 377 378 if (rte_event_crypto_adapter_service_id_get(TEST_ADAPTER_ID, 379 &adapter_service_id) == 0) { 380 uint32_t core_list[NUM_CORES]; 381 382 ret = rte_service_lcore_list(core_list, NUM_CORES); 383 TEST_ASSERT(ret >= 0, "Failed to get service core list!"); 384 385 if (core_list[0] != slcore_id) { 386 TEST_ASSERT_SUCCESS(rte_service_lcore_add(slcore_id), 387 "Failed to add service core"); 388 TEST_ASSERT_SUCCESS(rte_service_lcore_start(slcore_id), 389 "Failed to start service core"); 390 } 391 392 TEST_ASSERT_SUCCESS(rte_service_map_lcore_set( 393 adapter_service_id, slcore_id, 1), 394 "Failed to map adapter service"); 395 } 396 397 return TEST_SUCCESS; 398 } 399 400 static int 401 test_sessionless_with_op_forward_mode(void) 402 { 403 uint32_t cap; 404 int ret; 405 406 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 407 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 408 409 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 410 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 411 map_adapter_service_core(); 412 else { 413 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD)) 414 return TEST_SKIPPED; 415 } 416 417 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID), 418 "Failed to start event crypto adapter"); 419 420 ret = test_op_forward_mode(1); 421 TEST_ASSERT_SUCCESS(ret, "Sessionless - FORWARD mode test failed\n"); 422 return TEST_SUCCESS; 423 } 424 425 static int 426 test_session_with_op_forward_mode(void) 427 { 428 uint32_t cap; 429 int ret; 430 431 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 432 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 433 434 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 435 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 436 map_adapter_service_core(); 437 else { 438 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD)) 439 return TEST_SKIPPED; 440 } 441 442 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID 443 ), "Failed to start event crypto adapter"); 444 445 ret = test_op_forward_mode(0); 446 TEST_ASSERT_SUCCESS(ret, "Session based - FORWARD mode test failed\n"); 447 return TEST_SUCCESS; 448 } 449 450 static int 451 test_asym_op_forward_mode(uint8_t session_less) 452 { 453 const struct rte_cryptodev_asymmetric_xform_capability *capability; 454 struct rte_cryptodev_asym_capability_idx cap_idx; 455 struct rte_crypto_asym_xform xform_tc; 456 union rte_event_crypto_metadata m_data; 457 struct rte_cryptodev_info dev_info; 458 struct rte_crypto_asym_op *asym_op; 459 struct rte_crypto_op *op; 460 uint8_t input[4096] = {0}; 461 uint8_t *result = NULL; 462 struct rte_event ev; 463 void *sess = NULL; 464 uint32_t cap; 465 int ret; 466 467 memset(&m_data, 0, sizeof(m_data)); 468 469 rte_cryptodev_info_get(TEST_CDEV_ID, &dev_info); 470 if (session_less && !(dev_info.feature_flags & 471 RTE_CRYPTODEV_FF_ASYM_SESSIONLESS)) { 472 RTE_LOG(INFO, USER1, 473 "Device doesn't support Asym sessionless ops. Test Skipped\n"); 474 return TEST_SKIPPED; 475 } 476 /* Setup Cipher Parameters */ 477 xform_tc.next = NULL; 478 xform_tc.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX; 479 cap_idx.type = xform_tc.xform_type; 480 capability = rte_cryptodev_asym_capability_get(TEST_CDEV_ID, &cap_idx); 481 482 if (capability == NULL) { 483 RTE_LOG(INFO, USER1, 484 "Device doesn't support MODEX. Test Skipped\n"); 485 return TEST_SKIPPED; 486 } 487 488 op = rte_crypto_op_alloc(params.asym_op_mpool, 489 RTE_CRYPTO_OP_TYPE_ASYMMETRIC); 490 TEST_ASSERT_NOT_NULL(op, 491 "Failed to allocate asymmetric crypto operation struct\n"); 492 493 asym_op = op->asym; 494 495 result = rte_zmalloc(NULL, modex_test_case.result_len, 0); 496 xform_tc.modex.modulus.data = modex_test_case.modulus.data; 497 xform_tc.modex.modulus.length = modex_test_case.modulus.len; 498 xform_tc.modex.exponent.data = modex_test_case.exponent.data; 499 xform_tc.modex.exponent.length = modex_test_case.exponent.len; 500 memcpy(input, modex_test_case.base.data, 501 modex_test_case.base.len); 502 asym_op->modex.base.data = input; 503 asym_op->modex.base.length = modex_test_case.base.len; 504 asym_op->modex.result.data = result; 505 asym_op->modex.result.length = modex_test_case.result_len; 506 if (rte_cryptodev_asym_xform_capability_check_modlen(capability, 507 xform_tc.modex.modulus.length)) { 508 RTE_LOG(INFO, USER1, 509 "line %u FAILED: %s", __LINE__, 510 "Invalid MODULUS length specified"); 511 return TEST_FAILED; 512 } 513 514 if (!session_less) { 515 /* Create Crypto session*/ 516 ret = rte_cryptodev_asym_session_create(TEST_CDEV_ID, 517 &xform_tc, params.asym_sess_mpool, &sess); 518 TEST_ASSERT_SUCCESS(ret, "Failed to init session\n"); 519 520 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, 521 &cap); 522 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 523 524 if (cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_SESSION_PRIVATE_DATA) { 525 /* Fill in private user data information */ 526 m_data.request_info.cdev_id = request_info.cdev_id; 527 m_data.request_info.queue_pair_id = 528 request_info.queue_pair_id; 529 m_data.response_info.event = response_info.event; 530 rte_cryptodev_session_event_mdata_set(TEST_CDEV_ID, 531 sess, RTE_CRYPTO_OP_TYPE_ASYMMETRIC, 532 RTE_CRYPTO_OP_WITH_SESSION, 533 &m_data, sizeof(m_data)); 534 } 535 536 rte_crypto_op_attach_asym_session(op, sess); 537 } else { 538 op->sess_type = RTE_CRYPTO_OP_SESSIONLESS; 539 asym_op->xform = &xform_tc; 540 op->private_data_offset = (sizeof(struct rte_crypto_op) + 541 sizeof(struct rte_crypto_asym_op) + 542 DEFAULT_NUM_XFORMS * 543 sizeof(struct rte_crypto_asym_xform)); 544 /* Fill in private data information */ 545 m_data.request_info.cdev_id = request_info.cdev_id; 546 m_data.request_info.queue_pair_id = request_info.queue_pair_id; 547 m_data.response_info.event = response_info.event; 548 rte_memcpy((uint8_t *)op + op->private_data_offset, 549 &m_data, sizeof(m_data)); 550 } 551 /* Fill in event info and update event_ptr with rte_crypto_op */ 552 memset(&ev, 0, sizeof(ev)); 553 ev.queue_id = TEST_CRYPTO_EV_QUEUE_ID; 554 ev.sched_type = RTE_SCHED_TYPE_ATOMIC; 555 ev.flow_id = 0xAABB; 556 ev.event_ptr = op; 557 558 ret = send_recv_ev(&ev); 559 TEST_ASSERT_SUCCESS(ret, "Failed to send/receive event to " 560 "crypto adapter\n"); 561 562 test_crypto_adapter_stats(); 563 564 return TEST_SUCCESS; 565 } 566 567 568 static int 569 test_asym_sessionless_with_op_forward_mode(void) 570 { 571 uint32_t cap; 572 int ret; 573 574 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 575 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 576 577 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 578 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 579 map_adapter_service_core(); 580 else { 581 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD)) 582 return TEST_SKIPPED; 583 } 584 585 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID), 586 "Failed to start event crypto adapter"); 587 588 return test_asym_op_forward_mode(1); 589 } 590 591 static int 592 test_asym_session_with_op_forward_mode(void) 593 { 594 uint32_t cap; 595 int ret; 596 597 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 598 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 599 600 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 601 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 602 map_adapter_service_core(); 603 else { 604 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD)) 605 return TEST_SKIPPED; 606 } 607 608 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID 609 ), "Failed to start event crypto adapter"); 610 611 return test_asym_op_forward_mode(0); 612 } 613 614 static int 615 send_op_recv_ev(struct rte_crypto_op *op) 616 { 617 struct rte_crypto_op *recv_op; 618 struct rte_event ev; 619 int ret; 620 621 ret = rte_cryptodev_enqueue_burst(TEST_CDEV_ID, TEST_CDEV_QP_ID, 622 &op, NUM); 623 TEST_ASSERT_EQUAL(ret, NUM, "Failed to enqueue to cryptodev\n"); 624 memset(&ev, 0, sizeof(ev)); 625 626 while (rte_event_dequeue_burst(evdev, 627 TEST_APP_PORT_ID, &ev, NUM, 0) == 0) 628 rte_pause(); 629 630 recv_op = ev.event_ptr; 631 if (recv_op->type == RTE_CRYPTO_OP_TYPE_SYMMETRIC) { 632 #if PKT_TRACE 633 struct rte_mbuf *m = recv_op->sym->m_src; 634 rte_pktmbuf_dump(stdout, m, rte_pktmbuf_pkt_len(m)); 635 #endif 636 rte_pktmbuf_free(recv_op->sym->m_src); 637 } else { 638 uint8_t *data_expected = NULL, *data_received = NULL; 639 uint32_t data_size; 640 641 data_expected = modex_test_case.reminder.data; 642 data_received = op->asym->modex.result.data; 643 data_size = op->asym->modex.result.length; 644 ret = memcmp(data_expected, data_received, data_size); 645 TEST_ASSERT_EQUAL(ret, 0, 646 "Data mismatch for asym crypto adapter\n"); 647 rte_free(op->asym->modex.result.data); 648 } 649 rte_crypto_op_free(recv_op); 650 651 return TEST_SUCCESS; 652 } 653 654 static int 655 test_op_new_mode(uint8_t session_less) 656 { 657 struct rte_crypto_sym_xform cipher_xform; 658 struct rte_cryptodev_sym_session *sess; 659 union rte_event_crypto_metadata m_data; 660 struct rte_crypto_sym_op *sym_op; 661 struct rte_crypto_op *op; 662 struct rte_mbuf *m; 663 uint32_t cap; 664 int ret; 665 666 memset(&m_data, 0, sizeof(m_data)); 667 668 m = alloc_fill_mbuf(params.mbuf_pool, text_64B, PACKET_LENGTH, 0); 669 TEST_ASSERT_NOT_NULL(m, "Failed to allocate mbuf!\n"); 670 #if PKT_TRACE 671 rte_pktmbuf_dump(stdout, m, rte_pktmbuf_pkt_len(m)); 672 #endif 673 /* Setup Cipher Parameters */ 674 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER; 675 cipher_xform.next = NULL; 676 cipher_xform.cipher.algo = RTE_CRYPTO_CIPHER_NULL; 677 cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT; 678 679 op = rte_crypto_op_alloc(params.op_mpool, 680 RTE_CRYPTO_OP_TYPE_SYMMETRIC); 681 TEST_ASSERT_NOT_NULL(op, "Failed to allocate crypto_op!\n"); 682 683 sym_op = op->sym; 684 685 if (!session_less) { 686 sess = rte_cryptodev_sym_session_create( 687 params.session_mpool); 688 TEST_ASSERT_NOT_NULL(sess, "Session creation failed\n"); 689 690 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, 691 &cap); 692 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 693 694 if (cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_SESSION_PRIVATE_DATA) { 695 /* Fill in private user data information */ 696 m_data.response_info.event = response_info.event; 697 rte_cryptodev_session_event_mdata_set(TEST_CDEV_ID, 698 sess, RTE_CRYPTO_OP_TYPE_SYMMETRIC, 699 RTE_CRYPTO_OP_WITH_SESSION, 700 &m_data, sizeof(m_data)); 701 } 702 ret = rte_cryptodev_sym_session_init(TEST_CDEV_ID, sess, 703 &cipher_xform, params.session_priv_mpool); 704 TEST_ASSERT_SUCCESS(ret, "Failed to init session\n"); 705 706 rte_crypto_op_attach_sym_session(op, sess); 707 } else { 708 struct rte_crypto_sym_xform *first_xform; 709 710 rte_crypto_op_sym_xforms_alloc(op, NUM); 711 op->sess_type = RTE_CRYPTO_OP_SESSIONLESS; 712 first_xform = &cipher_xform; 713 sym_op->xform = first_xform; 714 uint32_t len = IV_OFFSET + MAXIMUM_IV_LENGTH; 715 op->private_data_offset = len; 716 /* Fill in private data information */ 717 m_data.response_info.event = response_info.event; 718 rte_memcpy((uint8_t *)op + len, &m_data, sizeof(m_data)); 719 } 720 721 sym_op->m_src = m; 722 sym_op->cipher.data.offset = 0; 723 sym_op->cipher.data.length = PACKET_LENGTH; 724 725 ret = send_op_recv_ev(op); 726 TEST_ASSERT_SUCCESS(ret, "Failed to enqueue op to cryptodev\n"); 727 728 test_crypto_adapter_stats(); 729 730 return TEST_SUCCESS; 731 } 732 733 static int 734 test_sessionless_with_op_new_mode(void) 735 { 736 uint32_t cap; 737 int ret; 738 739 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 740 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 741 742 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 743 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 744 map_adapter_service_core(); 745 else { 746 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 747 return TEST_SKIPPED; 748 } 749 750 /* start the event crypto adapter */ 751 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID), 752 "Failed to start event crypto adapter"); 753 754 ret = test_op_new_mode(1); 755 TEST_ASSERT_SUCCESS(ret, "Sessionless - NEW mode test failed\n"); 756 return TEST_SUCCESS; 757 } 758 759 static int 760 test_session_with_op_new_mode(void) 761 { 762 uint32_t cap; 763 int ret; 764 765 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 766 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 767 768 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 769 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 770 map_adapter_service_core(); 771 else { 772 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 773 return TEST_SKIPPED; 774 } 775 776 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID), 777 "Failed to start event crypto adapter"); 778 779 ret = test_op_new_mode(0); 780 TEST_ASSERT_SUCCESS(ret, "Session based - NEW mode test failed\n"); 781 return TEST_SUCCESS; 782 } 783 784 static int 785 test_asym_op_new_mode(uint8_t session_less) 786 { 787 const struct rte_cryptodev_asymmetric_xform_capability *capability; 788 struct rte_cryptodev_asym_capability_idx cap_idx; 789 struct rte_crypto_asym_xform xform_tc; 790 union rte_event_crypto_metadata m_data; 791 struct rte_cryptodev_info dev_info; 792 struct rte_crypto_asym_op *asym_op; 793 struct rte_crypto_op *op; 794 uint8_t input[4096] = {0}; 795 uint8_t *result = NULL; 796 void *sess = NULL; 797 uint32_t cap; 798 int ret; 799 800 memset(&m_data, 0, sizeof(m_data)); 801 802 rte_cryptodev_info_get(TEST_CDEV_ID, &dev_info); 803 if (session_less && !(dev_info.feature_flags & 804 RTE_CRYPTODEV_FF_ASYM_SESSIONLESS)) { 805 RTE_LOG(INFO, USER1, 806 "Device doesn't support Asym sessionless ops. Test Skipped\n"); 807 return TEST_SKIPPED; 808 } 809 /* Setup Cipher Parameters */ 810 xform_tc.next = NULL; 811 xform_tc.xform_type = RTE_CRYPTO_ASYM_XFORM_MODEX; 812 cap_idx.type = xform_tc.xform_type; 813 capability = rte_cryptodev_asym_capability_get(TEST_CDEV_ID, &cap_idx); 814 815 if (capability == NULL) { 816 RTE_LOG(INFO, USER1, 817 "Device doesn't support MODEX. Test Skipped\n"); 818 return TEST_SKIPPED; 819 } 820 821 op = rte_crypto_op_alloc(params.asym_op_mpool, 822 RTE_CRYPTO_OP_TYPE_ASYMMETRIC); 823 TEST_ASSERT_NOT_NULL(op, "Failed to allocate asym crypto_op!\n"); 824 825 asym_op = op->asym; 826 827 result = rte_zmalloc(NULL, modex_test_case.result_len, 0); 828 xform_tc.modex.modulus.data = modex_test_case.modulus.data; 829 xform_tc.modex.modulus.length = modex_test_case.modulus.len; 830 xform_tc.modex.exponent.data = modex_test_case.exponent.data; 831 xform_tc.modex.exponent.length = modex_test_case.exponent.len; 832 memcpy(input, modex_test_case.base.data, 833 modex_test_case.base.len); 834 asym_op->modex.base.data = input; 835 asym_op->modex.base.length = modex_test_case.base.len; 836 asym_op->modex.result.data = result; 837 asym_op->modex.result.length = modex_test_case.result_len; 838 if (rte_cryptodev_asym_xform_capability_check_modlen(capability, 839 xform_tc.modex.modulus.length)) { 840 RTE_LOG(INFO, USER1, 841 "line %u FAILED: %s", __LINE__, 842 "Invalid MODULUS length specified"); 843 return TEST_FAILED; 844 } 845 846 if (!session_less) { 847 ret = rte_cryptodev_asym_session_create(TEST_CDEV_ID, 848 &xform_tc, params.asym_sess_mpool, &sess); 849 TEST_ASSERT_NOT_NULL(sess, "Session creation failed\n"); 850 TEST_ASSERT_SUCCESS(ret, "Failed to init session\n"); 851 852 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, 853 &cap); 854 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 855 856 if (cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_SESSION_PRIVATE_DATA) { 857 /* Fill in private user data information */ 858 m_data.response_info.event = response_info.event; 859 rte_cryptodev_session_event_mdata_set(TEST_CDEV_ID, 860 sess, RTE_CRYPTO_OP_TYPE_ASYMMETRIC, 861 RTE_CRYPTO_OP_WITH_SESSION, 862 &m_data, sizeof(m_data)); 863 } 864 865 rte_crypto_op_attach_asym_session(op, sess); 866 } else { 867 op->sess_type = RTE_CRYPTO_OP_SESSIONLESS; 868 asym_op->xform = &xform_tc; 869 op->private_data_offset = (sizeof(struct rte_crypto_op) + 870 sizeof(struct rte_crypto_asym_op) + 871 DEFAULT_NUM_XFORMS * 872 sizeof(struct rte_crypto_asym_xform)); 873 /* Fill in private data information */ 874 m_data.response_info.event = response_info.event; 875 rte_memcpy((uint8_t *)op + op->private_data_offset, 876 &m_data, sizeof(m_data)); 877 } 878 879 ret = send_op_recv_ev(op); 880 TEST_ASSERT_SUCCESS(ret, "Failed to enqueue op to cryptodev\n"); 881 882 test_crypto_adapter_stats(); 883 884 return TEST_SUCCESS; 885 } 886 887 static int 888 test_asym_sessionless_with_op_new_mode(void) 889 { 890 uint32_t cap; 891 int ret; 892 893 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 894 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 895 896 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 897 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 898 map_adapter_service_core(); 899 else { 900 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 901 return TEST_SKIPPED; 902 } 903 904 /* start the event crypto adapter */ 905 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID), 906 "Failed to start event crypto adapter"); 907 908 return test_asym_op_new_mode(1); 909 } 910 911 static int 912 test_asym_session_with_op_new_mode(void) 913 { 914 uint32_t cap; 915 int ret; 916 917 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 918 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 919 920 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 921 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 922 map_adapter_service_core(); 923 else { 924 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 925 return TEST_SKIPPED; 926 } 927 928 TEST_ASSERT_SUCCESS(rte_event_crypto_adapter_start(TEST_ADAPTER_ID), 929 "Failed to start event crypto adapter"); 930 931 return test_asym_op_new_mode(0); 932 } 933 934 static int 935 configure_cryptodev(void) 936 { 937 const struct rte_cryptodev_capabilities *capability; 938 struct rte_cryptodev_qp_conf qp_conf; 939 struct rte_cryptodev_config conf; 940 struct rte_cryptodev_info info; 941 unsigned int session_size; 942 unsigned int i = 0; 943 uint8_t nb_devs; 944 int ret; 945 946 947 params.mbuf_pool = rte_pktmbuf_pool_create( 948 "CRYPTO_ADAPTER_MBUFPOOL", 949 NUM_MBUFS, MBUF_CACHE_SIZE, 0, MBUF_SIZE, 950 rte_socket_id()); 951 if (params.mbuf_pool == NULL) { 952 RTE_LOG(ERR, USER1, "Can't create CRYPTO_MBUFPOOL\n"); 953 return TEST_FAILED; 954 } 955 956 params.op_mpool = rte_crypto_op_pool_create( 957 "EVENT_CRYPTO_SYM_OP_POOL", 958 RTE_CRYPTO_OP_TYPE_SYMMETRIC, 959 NUM_MBUFS, MBUF_CACHE_SIZE, 960 DEFAULT_NUM_XFORMS * 961 sizeof(struct rte_crypto_sym_xform) + 962 MAXIMUM_IV_LENGTH + 963 sizeof(union rte_event_crypto_metadata), 964 rte_socket_id()); 965 if (params.op_mpool == NULL) { 966 RTE_LOG(ERR, USER1, "Can't create CRYPTO_OP_POOL\n"); 967 return TEST_FAILED; 968 } 969 970 /* Create a NULL crypto device */ 971 nb_devs = rte_cryptodev_device_count_by_driver( 972 rte_cryptodev_driver_id_get( 973 RTE_STR(CRYPTODEV_NAME_NULL_PMD))); 974 if (!nb_devs) { 975 ret = rte_vdev_init( 976 RTE_STR(CRYPTODEV_NAME_NULL_PMD), NULL); 977 978 TEST_ASSERT(ret == 0, "Failed to create pmd:%s instance\n", 979 RTE_STR(CRYPTODEV_NAME_NULL_PMD)); 980 } 981 982 nb_devs = rte_cryptodev_count(); 983 if (!nb_devs) { 984 RTE_LOG(ERR, USER1, "No crypto devices found!\n"); 985 return TEST_FAILED; 986 } 987 988 /* 989 * Create mempool with maximum number of sessions * 2, 990 * to include the session headers & private data 991 */ 992 session_size = rte_cryptodev_sym_get_private_session_size(TEST_CDEV_ID); 993 session_size += sizeof(union rte_event_crypto_metadata); 994 995 params.session_mpool = rte_cryptodev_sym_session_pool_create( 996 "CRYPTO_ADAPTER_SESSION_MP", 997 MAX_NB_SESSIONS, 0, 0, 998 sizeof(union rte_event_crypto_metadata), 999 SOCKET_ID_ANY); 1000 TEST_ASSERT_NOT_NULL(params.session_mpool, 1001 "session mempool allocation failed\n"); 1002 1003 params.session_priv_mpool = rte_mempool_create( 1004 "CRYPTO_AD_SESS_MP_PRIV", 1005 MAX_NB_SESSIONS, 1006 session_size, 1007 0, 0, NULL, NULL, NULL, 1008 NULL, SOCKET_ID_ANY, 1009 0); 1010 TEST_ASSERT_NOT_NULL(params.session_priv_mpool, 1011 "session mempool allocation failed\n"); 1012 1013 rte_cryptodev_info_get(TEST_CDEV_ID, &info); 1014 1015 while ((capability = &info.capabilities[i++])->op != 1016 RTE_CRYPTO_OP_TYPE_UNDEFINED) { 1017 if (capability->op == RTE_CRYPTO_OP_TYPE_ASYMMETRIC) { 1018 params.asym_op_mpool = rte_crypto_op_pool_create( 1019 "EVENT_CRYPTO_ASYM_OP_POOL", 1020 RTE_CRYPTO_OP_TYPE_ASYMMETRIC, 1021 NUM_MBUFS, MBUF_CACHE_SIZE, 1022 (DEFAULT_NUM_XFORMS * 1023 sizeof(struct rte_crypto_asym_xform)) + 1024 sizeof(union rte_event_crypto_metadata), 1025 rte_socket_id()); 1026 TEST_ASSERT_NOT_NULL(params.asym_op_mpool, 1027 "Can't create CRYPTO_ASYM_OP_POOL\n"); 1028 1029 params.asym_sess_mpool = 1030 rte_cryptodev_asym_session_pool_create( 1031 "CRYPTO_AD_ASYM_SESS_MP", 1032 MAX_NB_SESSIONS, 0, 1033 sizeof(union rte_event_crypto_metadata), 1034 SOCKET_ID_ANY); 1035 TEST_ASSERT_NOT_NULL(params.asym_sess_mpool, 1036 "asym session mempool allocation failed\n"); 1037 break; 1038 } 1039 } 1040 1041 conf.nb_queue_pairs = info.max_nb_queue_pairs; 1042 conf.socket_id = SOCKET_ID_ANY; 1043 conf.ff_disable = RTE_CRYPTODEV_FF_SECURITY; 1044 1045 TEST_ASSERT_SUCCESS(rte_cryptodev_configure(TEST_CDEV_ID, &conf), 1046 "Failed to configure cryptodev %u with %u qps\n", 1047 TEST_CDEV_ID, conf.nb_queue_pairs); 1048 1049 qp_conf.nb_descriptors = DEFAULT_NUM_OPS_INFLIGHT; 1050 qp_conf.mp_session = params.session_mpool; 1051 qp_conf.mp_session_private = params.session_priv_mpool; 1052 1053 TEST_ASSERT_SUCCESS(rte_cryptodev_queue_pair_setup( 1054 TEST_CDEV_ID, TEST_CDEV_QP_ID, &qp_conf, 1055 rte_cryptodev_socket_id(TEST_CDEV_ID)), 1056 "Failed to setup queue pair %u on cryptodev %u\n", 1057 TEST_CDEV_QP_ID, TEST_CDEV_ID); 1058 1059 return TEST_SUCCESS; 1060 } 1061 1062 static inline void 1063 evdev_set_conf_values(struct rte_event_dev_config *dev_conf, 1064 struct rte_event_dev_info *info) 1065 { 1066 memset(dev_conf, 0, sizeof(struct rte_event_dev_config)); 1067 dev_conf->dequeue_timeout_ns = info->min_dequeue_timeout_ns; 1068 dev_conf->nb_event_ports = NB_TEST_PORTS; 1069 dev_conf->nb_event_queues = NB_TEST_QUEUES; 1070 dev_conf->nb_event_queue_flows = info->max_event_queue_flows; 1071 dev_conf->nb_event_port_dequeue_depth = 1072 info->max_event_port_dequeue_depth; 1073 dev_conf->nb_event_port_enqueue_depth = 1074 info->max_event_port_enqueue_depth; 1075 dev_conf->nb_event_port_enqueue_depth = 1076 info->max_event_port_enqueue_depth; 1077 dev_conf->nb_events_limit = 1078 info->max_num_events; 1079 } 1080 1081 static int 1082 configure_eventdev(void) 1083 { 1084 struct rte_event_queue_conf queue_conf; 1085 struct rte_event_dev_config devconf; 1086 struct rte_event_dev_info info; 1087 uint32_t queue_count; 1088 uint32_t port_count; 1089 int ret; 1090 uint8_t qid; 1091 1092 if (!rte_event_dev_count()) { 1093 /* If there is no hardware eventdev, or no software vdev was 1094 * specified on the command line, create an instance of 1095 * event_sw. 1096 */ 1097 LOG_DBG("Failed to find a valid event device... " 1098 "testing with event_sw device\n"); 1099 TEST_ASSERT_SUCCESS(rte_vdev_init("event_sw0", NULL), 1100 "Error creating eventdev"); 1101 evdev = rte_event_dev_get_dev_id("event_sw0"); 1102 } 1103 1104 ret = rte_event_dev_info_get(evdev, &info); 1105 TEST_ASSERT_SUCCESS(ret, "Failed to get event dev info\n"); 1106 1107 evdev_set_conf_values(&devconf, &info); 1108 1109 ret = rte_event_dev_configure(evdev, &devconf); 1110 TEST_ASSERT_SUCCESS(ret, "Failed to configure eventdev\n"); 1111 1112 /* Set up event queue */ 1113 ret = rte_event_dev_attr_get(evdev, RTE_EVENT_DEV_ATTR_QUEUE_COUNT, 1114 &queue_count); 1115 TEST_ASSERT_SUCCESS(ret, "Queue count get failed\n"); 1116 TEST_ASSERT_EQUAL(queue_count, 2, "Unexpected queue count\n"); 1117 1118 qid = TEST_APP_EV_QUEUE_ID; 1119 ret = rte_event_queue_setup(evdev, qid, NULL); 1120 TEST_ASSERT_SUCCESS(ret, "Failed to setup queue=%d\n", qid); 1121 1122 queue_conf.nb_atomic_flows = info.max_event_queue_flows; 1123 queue_conf.nb_atomic_order_sequences = 32; 1124 queue_conf.schedule_type = RTE_SCHED_TYPE_ATOMIC; 1125 queue_conf.priority = RTE_EVENT_DEV_PRIORITY_HIGHEST; 1126 queue_conf.event_queue_cfg = RTE_EVENT_QUEUE_CFG_SINGLE_LINK; 1127 1128 qid = TEST_CRYPTO_EV_QUEUE_ID; 1129 ret = rte_event_queue_setup(evdev, qid, &queue_conf); 1130 TEST_ASSERT_SUCCESS(ret, "Failed to setup queue=%u\n", qid); 1131 1132 /* Set up event port */ 1133 ret = rte_event_dev_attr_get(evdev, RTE_EVENT_DEV_ATTR_PORT_COUNT, 1134 &port_count); 1135 TEST_ASSERT_SUCCESS(ret, "Port count get failed\n"); 1136 TEST_ASSERT_EQUAL(port_count, 1, "Unexpected port count\n"); 1137 1138 ret = rte_event_port_setup(evdev, TEST_APP_PORT_ID, NULL); 1139 TEST_ASSERT_SUCCESS(ret, "Failed to setup port=%d\n", 1140 TEST_APP_PORT_ID); 1141 1142 qid = TEST_APP_EV_QUEUE_ID; 1143 ret = rte_event_port_link(evdev, TEST_APP_PORT_ID, &qid, NULL, 1); 1144 TEST_ASSERT(ret >= 0, "Failed to link queue port=%d\n", 1145 TEST_APP_PORT_ID); 1146 1147 return TEST_SUCCESS; 1148 } 1149 1150 static void 1151 test_crypto_adapter_free(void) 1152 { 1153 rte_event_crypto_adapter_free(TEST_ADAPTER_ID); 1154 } 1155 1156 static int 1157 test_crypto_adapter_create(void) 1158 { 1159 struct rte_event_port_conf conf = { 1160 .dequeue_depth = 8, 1161 .enqueue_depth = 8, 1162 .new_event_threshold = 1200, 1163 }; 1164 int ret; 1165 1166 /* Create adapter with default port creation callback */ 1167 ret = rte_event_crypto_adapter_create(TEST_ADAPTER_ID, 1168 evdev, 1169 &conf, 0); 1170 TEST_ASSERT_SUCCESS(ret, "Failed to create event crypto adapter\n"); 1171 1172 return TEST_SUCCESS; 1173 } 1174 1175 static int 1176 test_crypto_adapter_qp_add_del(void) 1177 { 1178 uint32_t cap; 1179 int ret; 1180 1181 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 1182 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 1183 1184 if (cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_QP_EV_BIND) { 1185 ret = rte_event_crypto_adapter_queue_pair_add(TEST_ADAPTER_ID, 1186 TEST_CDEV_ID, TEST_CDEV_QP_ID, &response_info); 1187 } else 1188 ret = rte_event_crypto_adapter_queue_pair_add(TEST_ADAPTER_ID, 1189 TEST_CDEV_ID, TEST_CDEV_QP_ID, NULL); 1190 1191 TEST_ASSERT_SUCCESS(ret, "Failed to create add queue pair\n"); 1192 1193 ret = rte_event_crypto_adapter_queue_pair_del(TEST_ADAPTER_ID, 1194 TEST_CDEV_ID, TEST_CDEV_QP_ID); 1195 TEST_ASSERT_SUCCESS(ret, "Failed to delete add queue pair\n"); 1196 1197 return TEST_SUCCESS; 1198 } 1199 1200 static int 1201 configure_event_crypto_adapter(enum rte_event_crypto_adapter_mode mode) 1202 { 1203 struct rte_event_port_conf conf = { 1204 .dequeue_depth = 8, 1205 .enqueue_depth = 8, 1206 .new_event_threshold = 1200, 1207 }; 1208 1209 uint32_t cap; 1210 int ret; 1211 1212 ret = rte_event_crypto_adapter_caps_get(evdev, TEST_CDEV_ID, &cap); 1213 TEST_ASSERT_SUCCESS(ret, "Failed to get adapter capabilities\n"); 1214 1215 /* Skip mode and capability mismatch check for SW eventdev */ 1216 if (!(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW) && 1217 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) && 1218 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_QP_EV_BIND)) 1219 goto adapter_create; 1220 1221 if (mode == RTE_EVENT_CRYPTO_ADAPTER_OP_FORWARD) { 1222 if (cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_FWD) 1223 params.internal_port_op_fwd = 1; 1224 else 1225 return -ENOTSUP; 1226 } 1227 1228 if ((mode == RTE_EVENT_CRYPTO_ADAPTER_OP_NEW) && 1229 !(cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_OP_NEW)) 1230 return -ENOTSUP; 1231 1232 adapter_create: 1233 /* Create adapter with default port creation callback */ 1234 ret = rte_event_crypto_adapter_create(TEST_ADAPTER_ID, 1235 evdev, 1236 &conf, mode); 1237 TEST_ASSERT_SUCCESS(ret, "Failed to create event crypto adapter\n"); 1238 1239 if (cap & RTE_EVENT_CRYPTO_ADAPTER_CAP_INTERNAL_PORT_QP_EV_BIND) { 1240 ret = rte_event_crypto_adapter_queue_pair_add(TEST_ADAPTER_ID, 1241 TEST_CDEV_ID, TEST_CDEV_QP_ID, &response_info); 1242 } else 1243 ret = rte_event_crypto_adapter_queue_pair_add(TEST_ADAPTER_ID, 1244 TEST_CDEV_ID, TEST_CDEV_QP_ID, NULL); 1245 1246 TEST_ASSERT_SUCCESS(ret, "Failed to add queue pair\n"); 1247 1248 if (!params.internal_port_op_fwd) { 1249 ret = rte_event_crypto_adapter_event_port_get(TEST_ADAPTER_ID, 1250 ¶ms.crypto_event_port_id); 1251 TEST_ASSERT_SUCCESS(ret, "Failed to get event port\n"); 1252 } 1253 1254 return TEST_SUCCESS; 1255 } 1256 1257 static void 1258 test_crypto_adapter_stop(void) 1259 { 1260 uint32_t evdev_service_id, adapter_service_id; 1261 1262 /* retrieve service ids & stop services */ 1263 if (rte_event_crypto_adapter_service_id_get(TEST_ADAPTER_ID, 1264 &adapter_service_id) == 0) { 1265 rte_service_runstate_set(adapter_service_id, 0); 1266 rte_service_lcore_stop(slcore_id); 1267 rte_service_lcore_del(slcore_id); 1268 rte_event_crypto_adapter_stop(TEST_ADAPTER_ID); 1269 } 1270 1271 if (rte_event_dev_service_id_get(evdev, &evdev_service_id) == 0) { 1272 rte_service_runstate_set(evdev_service_id, 0); 1273 rte_service_lcore_stop(slcore_id); 1274 rte_service_lcore_del(slcore_id); 1275 rte_cryptodev_stop(TEST_CDEV_ID); 1276 rte_event_dev_stop(evdev); 1277 } else { 1278 rte_cryptodev_stop(TEST_CDEV_ID); 1279 rte_event_dev_stop(evdev); 1280 } 1281 } 1282 1283 static int 1284 test_crypto_adapter_conf(enum rte_event_crypto_adapter_mode mode) 1285 { 1286 uint32_t evdev_service_id; 1287 uint8_t qid; 1288 int ret; 1289 1290 if (!crypto_adapter_setup_done) { 1291 ret = configure_event_crypto_adapter(mode); 1292 if (ret) 1293 return ret; 1294 if (!params.internal_port_op_fwd) { 1295 qid = TEST_CRYPTO_EV_QUEUE_ID; 1296 ret = rte_event_port_link(evdev, 1297 params.crypto_event_port_id, &qid, NULL, 1); 1298 TEST_ASSERT(ret >= 0, "Failed to link queue %d " 1299 "port=%u\n", qid, 1300 params.crypto_event_port_id); 1301 } 1302 crypto_adapter_setup_done = 1; 1303 } 1304 1305 /* retrieve service ids */ 1306 if (rte_event_dev_service_id_get(evdev, &evdev_service_id) == 0) { 1307 /* add a service core and start it */ 1308 TEST_ASSERT_SUCCESS(rte_service_lcore_add(slcore_id), 1309 "Failed to add service core"); 1310 TEST_ASSERT_SUCCESS(rte_service_lcore_start(slcore_id), 1311 "Failed to start service core"); 1312 1313 /* map services to it */ 1314 TEST_ASSERT_SUCCESS(rte_service_map_lcore_set(evdev_service_id, 1315 slcore_id, 1), "Failed to map evdev service"); 1316 1317 /* set services to running */ 1318 TEST_ASSERT_SUCCESS(rte_service_runstate_set(evdev_service_id, 1319 1), "Failed to start evdev service"); 1320 } 1321 1322 /* start the eventdev */ 1323 TEST_ASSERT_SUCCESS(rte_event_dev_start(evdev), 1324 "Failed to start event device"); 1325 1326 /* start the cryptodev */ 1327 TEST_ASSERT_SUCCESS(rte_cryptodev_start(TEST_CDEV_ID), 1328 "Failed to start crypto device"); 1329 1330 return TEST_SUCCESS; 1331 } 1332 1333 static int 1334 test_crypto_adapter_conf_op_forward_mode(void) 1335 { 1336 enum rte_event_crypto_adapter_mode mode; 1337 1338 mode = RTE_EVENT_CRYPTO_ADAPTER_OP_FORWARD; 1339 1340 return test_crypto_adapter_conf(mode); 1341 } 1342 1343 static int 1344 test_crypto_adapter_conf_op_new_mode(void) 1345 { 1346 enum rte_event_crypto_adapter_mode mode; 1347 1348 mode = RTE_EVENT_CRYPTO_ADAPTER_OP_NEW; 1349 1350 return test_crypto_adapter_conf(mode); 1351 } 1352 1353 1354 static int 1355 testsuite_setup(void) 1356 { 1357 int ret; 1358 1359 slcore_id = rte_get_next_lcore(-1, 1, 0); 1360 TEST_ASSERT_NOT_EQUAL(slcore_id, RTE_MAX_LCORE, "At least 2 lcores " 1361 "are required to run this autotest\n"); 1362 1363 /* Setup and start event device. */ 1364 ret = configure_eventdev(); 1365 TEST_ASSERT_SUCCESS(ret, "Failed to setup eventdev\n"); 1366 1367 /* Setup and start crypto device. */ 1368 ret = configure_cryptodev(); 1369 TEST_ASSERT_SUCCESS(ret, "cryptodev initialization failed\n"); 1370 1371 return TEST_SUCCESS; 1372 } 1373 1374 static void 1375 crypto_adapter_teardown(void) 1376 { 1377 int ret; 1378 1379 ret = rte_event_crypto_adapter_stop(TEST_ADAPTER_ID); 1380 if (ret < 0) 1381 RTE_LOG(ERR, USER1, "Failed to stop adapter!"); 1382 1383 ret = rte_event_crypto_adapter_queue_pair_del(TEST_ADAPTER_ID, 1384 TEST_CDEV_ID, TEST_CDEV_QP_ID); 1385 if (ret < 0) 1386 RTE_LOG(ERR, USER1, "Failed to delete queue pair!"); 1387 1388 ret = rte_event_crypto_adapter_free(TEST_ADAPTER_ID); 1389 if (ret < 0) 1390 RTE_LOG(ERR, USER1, "Failed to free adapter!"); 1391 1392 crypto_adapter_setup_done = 0; 1393 } 1394 1395 static void 1396 crypto_teardown(void) 1397 { 1398 /* Free mbuf mempool */ 1399 if (params.mbuf_pool != NULL) { 1400 RTE_LOG(DEBUG, USER1, "CRYPTO_ADAPTER_MBUFPOOL count %u\n", 1401 rte_mempool_avail_count(params.mbuf_pool)); 1402 rte_mempool_free(params.mbuf_pool); 1403 params.mbuf_pool = NULL; 1404 } 1405 1406 /* Free session mempool */ 1407 if (params.session_mpool != NULL) { 1408 RTE_LOG(DEBUG, USER1, "CRYPTO_ADAPTER_SESSION_MP count %u\n", 1409 rte_mempool_avail_count(params.session_mpool)); 1410 rte_mempool_free(params.session_mpool); 1411 params.session_mpool = NULL; 1412 } 1413 if (params.session_priv_mpool != NULL) { 1414 rte_mempool_avail_count(params.session_priv_mpool); 1415 rte_mempool_free(params.session_priv_mpool); 1416 params.session_priv_mpool = NULL; 1417 } 1418 1419 /* Free asym session mempool */ 1420 if (params.asym_sess_mpool != NULL) { 1421 RTE_LOG(DEBUG, USER1, "CRYPTO_AD_ASYM_SESS_MP count %u\n", 1422 rte_mempool_avail_count(params.asym_sess_mpool)); 1423 rte_mempool_free(params.asym_sess_mpool); 1424 params.asym_sess_mpool = NULL; 1425 } 1426 /* Free asym ops mempool */ 1427 if (params.asym_op_mpool != NULL) { 1428 RTE_LOG(DEBUG, USER1, "EVENT_CRYPTO_ASYM_OP_POOL count %u\n", 1429 rte_mempool_avail_count(params.asym_op_mpool)); 1430 rte_mempool_free(params.asym_op_mpool); 1431 params.asym_op_mpool = NULL; 1432 } 1433 1434 /* Free ops mempool */ 1435 if (params.op_mpool != NULL) { 1436 RTE_LOG(DEBUG, USER1, "EVENT_CRYPTO_SYM_OP_POOL count %u\n", 1437 rte_mempool_avail_count(params.op_mpool)); 1438 rte_mempool_free(params.op_mpool); 1439 params.op_mpool = NULL; 1440 } 1441 } 1442 1443 static void 1444 eventdev_teardown(void) 1445 { 1446 rte_event_dev_stop(evdev); 1447 } 1448 1449 static void 1450 testsuite_teardown(void) 1451 { 1452 crypto_adapter_teardown(); 1453 crypto_teardown(); 1454 eventdev_teardown(); 1455 } 1456 1457 static struct unit_test_suite functional_testsuite = { 1458 .suite_name = "Event crypto adapter test suite", 1459 .setup = testsuite_setup, 1460 .teardown = testsuite_teardown, 1461 .unit_test_cases = { 1462 1463 TEST_CASE_ST(NULL, test_crypto_adapter_free, 1464 test_crypto_adapter_create), 1465 1466 TEST_CASE_ST(test_crypto_adapter_create, 1467 test_crypto_adapter_free, 1468 test_crypto_adapter_qp_add_del), 1469 1470 TEST_CASE_ST(test_crypto_adapter_create, 1471 test_crypto_adapter_free, 1472 test_crypto_adapter_stats), 1473 1474 TEST_CASE_ST(test_crypto_adapter_conf_op_forward_mode, 1475 test_crypto_adapter_stop, 1476 test_session_with_op_forward_mode), 1477 1478 TEST_CASE_ST(test_crypto_adapter_conf_op_forward_mode, 1479 test_crypto_adapter_stop, 1480 test_sessionless_with_op_forward_mode), 1481 1482 TEST_CASE_ST(test_crypto_adapter_conf_op_new_mode, 1483 test_crypto_adapter_stop, 1484 test_session_with_op_new_mode), 1485 1486 TEST_CASE_ST(test_crypto_adapter_conf_op_new_mode, 1487 test_crypto_adapter_stop, 1488 test_sessionless_with_op_new_mode), 1489 1490 TEST_CASE_ST(test_crypto_adapter_conf_op_forward_mode, 1491 test_crypto_adapter_stop, 1492 test_asym_session_with_op_forward_mode), 1493 1494 TEST_CASE_ST(test_crypto_adapter_conf_op_forward_mode, 1495 test_crypto_adapter_stop, 1496 test_asym_sessionless_with_op_forward_mode), 1497 1498 TEST_CASE_ST(test_crypto_adapter_conf_op_new_mode, 1499 test_crypto_adapter_stop, 1500 test_asym_session_with_op_new_mode), 1501 1502 TEST_CASE_ST(test_crypto_adapter_conf_op_new_mode, 1503 test_crypto_adapter_stop, 1504 test_asym_sessionless_with_op_new_mode), 1505 1506 TEST_CASES_END() /**< NULL terminate unit test array */ 1507 } 1508 }; 1509 1510 static int 1511 test_event_crypto_adapter(void) 1512 { 1513 return unit_test_suite_runner(&functional_testsuite); 1514 } 1515 1516 #endif /* !RTE_EXEC_ENV_WINDOWS */ 1517 1518 REGISTER_TEST_COMMAND(event_crypto_adapter_autotest, 1519 test_event_crypto_adapter); 1520