/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Intel Corporation
 */

#include <stdlib.h>
#include <string.h>

#include <rte_malloc.h>
#include <rte_eal.h>
#include <rte_log.h>
#include <rte_compressdev.h>

#include "comp_perf_test_verify.h"
#include "comp_perf_test_common.h"

void
cperf_verify_test_destructor(void *arg)
{
	if (arg) {
		comp_perf_free_memory(&((struct cperf_verify_ctx *)arg)->mem);
		rte_free(arg);
	}
}

void *
cperf_verify_test_constructor(uint8_t dev_id, uint16_t qp_id,
		struct comp_test_data *options)
{
	struct cperf_verify_ctx *ctx = NULL;

	ctx = rte_malloc(NULL, sizeof(struct cperf_verify_ctx), 0);

	if (ctx == NULL)
		return NULL;

	ctx->mem.dev_id = dev_id;
	ctx->mem.qp_id = qp_id;
	ctx->options = options;

	if (!comp_perf_allocate_memory(ctx->options, &ctx->mem) &&
			!prepare_bufs(ctx->options, &ctx->mem))
		return ctx;

	cperf_verify_test_destructor(ctx);
	return NULL;
}
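/*
 * Run one pass of the verify test in a single direction (compress or
 * decompress): set up a DEFLATE xform, attach the input/output mbufs to
 * comp ops, enqueue them in bursts of test_data->burst_sz, and copy each
 * dequeued op's produced bytes into the flat output buffer so the runner
 * can later compare the round-tripped data with the original input.
 * Returns 0 on success and -1 on any failure.
 */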
static int
main_loop(struct cperf_verify_ctx *ctx, enum rte_comp_xform_type type)
{
	struct comp_test_data *test_data = ctx->options;
	uint8_t *output_data_ptr;
	size_t *output_data_sz;
	struct cperf_mem_resources *mem = &ctx->mem;

	uint8_t dev_id = mem->dev_id;
	uint32_t i, iter, num_iter;
	struct rte_comp_op **ops, **deq_ops;
	void *priv_xform = NULL;
	struct rte_comp_xform xform;
	size_t output_size = 0;
	struct rte_mbuf **input_bufs, **output_bufs;
	int res = 0;
	int allocated = 0;
	uint32_t out_seg_sz;

	if (test_data == NULL || !test_data->burst_sz) {
		RTE_LOG(ERR, USER1, "Unknown burst size\n");
		return -1;
	}

	ops = rte_zmalloc_socket(NULL,
		2 * mem->total_bufs * sizeof(struct rte_comp_op *),
		0, rte_socket_id());

	if (ops == NULL) {
		RTE_LOG(ERR, USER1,
			"Can't allocate memory for ops structures\n");
		return -1;
	}

	deq_ops = &ops[mem->total_bufs];

	if (type == RTE_COMP_COMPRESS) {
		xform = (struct rte_comp_xform) {
			.type = RTE_COMP_COMPRESS,
			.compress = {
				.algo = RTE_COMP_ALGO_DEFLATE,
				.deflate.huffman = test_data->huffman_enc,
				.level = test_data->level,
				.window_size = test_data->window_sz,
				.chksum = RTE_COMP_CHECKSUM_NONE,
				.hash_algo = RTE_COMP_HASH_ALGO_NONE
			}
		};
		output_data_ptr = ctx->mem.compressed_data;
		output_data_sz = &ctx->comp_data_sz;
		input_bufs = mem->decomp_bufs;
		output_bufs = mem->comp_bufs;
		out_seg_sz = test_data->out_seg_sz;
	} else {
		xform = (struct rte_comp_xform) {
			.type = RTE_COMP_DECOMPRESS,
			.decompress = {
				.algo = RTE_COMP_ALGO_DEFLATE,
				.chksum = RTE_COMP_CHECKSUM_NONE,
				.window_size = test_data->window_sz,
				.hash_algo = RTE_COMP_HASH_ALGO_NONE
			}
		};
		output_data_ptr = ctx->mem.decompressed_data;
		output_data_sz = &ctx->decomp_data_sz;
		input_bufs = mem->comp_bufs;
		output_bufs = mem->decomp_bufs;
		out_seg_sz = test_data->seg_sz;
	}

	/* Create private xform */
	if (rte_compressdev_private_xform_create(dev_id, &xform,
			&priv_xform) < 0) {
		RTE_LOG(ERR, USER1, "Private xform could not be created\n");
		res = -1;
		goto end;
	}

	num_iter = 1;

	for (iter = 0; iter < num_iter; iter++) {
		uint32_t total_ops = mem->total_bufs;
		uint32_t remaining_ops = mem->total_bufs;
		uint32_t total_deq_ops = 0;
		uint32_t total_enq_ops = 0;
		uint16_t ops_unused = 0;
		uint16_t num_enq = 0;
		uint16_t num_deq = 0;

		output_size = 0;

		while (remaining_ops > 0) {
			uint16_t num_ops = RTE_MIN(remaining_ops,
						   test_data->burst_sz);
			uint16_t ops_needed = num_ops - ops_unused;

			/*
			 * Move the unused operations from the previous
			 * enqueue_burst call to the front, to maintain order
			 */
			if ((ops_unused > 0) && (num_enq > 0)) {
				size_t nb_b_to_mov =
					ops_unused * sizeof(struct rte_comp_op *);

				memmove(ops, &ops[num_enq], nb_b_to_mov);
			}

			/* Allocate compression operations */
			if (ops_needed && !rte_comp_op_bulk_alloc(
						mem->op_pool,
						&ops[ops_unused],
						ops_needed)) {
				RTE_LOG(ERR, USER1,
					"Could not allocate enough operations\n");
				res = -1;
				goto end;
			}
			allocated += ops_needed;

			for (i = 0; i < ops_needed; i++) {
				/*
				 * Calculate next buffer to attach to operation
				 */
				uint32_t buf_id = total_enq_ops + i +
						ops_unused;
				uint16_t op_id = ops_unused + i;
				/* Reset all data in output buffers */
				struct rte_mbuf *m = output_bufs[buf_id];

				m->pkt_len = out_seg_sz * m->nb_segs;
				while (m) {
					m->data_len = m->buf_len - m->data_off;
					m = m->next;
				}
				ops[op_id]->m_src = input_bufs[buf_id];
				ops[op_id]->m_dst = output_bufs[buf_id];
				ops[op_id]->src.offset = 0;
				ops[op_id]->src.length =
					rte_pktmbuf_pkt_len(input_bufs[buf_id]);
				ops[op_id]->dst.offset = 0;
				ops[op_id]->flush_flag = RTE_COMP_FLUSH_FINAL;
				ops[op_id]->input_chksum = buf_id;
				ops[op_id]->private_xform = priv_xform;
			}

			if (unlikely(test_data->perf_comp_force_stop))
				goto end;

			num_enq = rte_compressdev_enqueue_burst(dev_id,
								mem->qp_id, ops,
								num_ops);
			if (num_enq == 0) {
				struct rte_compressdev_stats stats;

				rte_compressdev_stats_get(dev_id, &stats);
				if (stats.enqueue_err_count) {
					res = -1;
					goto end;
				}
			}

			ops_unused = num_ops - num_enq;
			remaining_ops -= num_enq;
			total_enq_ops += num_enq;

			num_deq = rte_compressdev_dequeue_burst(dev_id,
							mem->qp_id,
							deq_ops,
							test_data->burst_sz);
			total_deq_ops += num_deq;

			for (i = 0; i < num_deq; i++) {
				struct rte_comp_op *op = deq_ops[i];

				if (op->status ==
				    RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED ||
				    op->status ==
				    RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {
					RTE_LOG(ERR, USER1,
						"Out of space error occurred due to "
						"incompressible input data expanding to "
						"larger than destination buffer. Increase "
						"the EXPANSE_RATIO constant to use this data.\n");
					res = -1;
					goto end;
				} else if (op->status !=
					   RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
						"Some operations were not successful\n");
					res = -1;
					goto end;
				}

				const void *read_data_addr =
					rte_pktmbuf_read(op->m_dst, 0,
						op->produced, output_data_ptr);
				if (read_data_addr == NULL) {
					RTE_LOG(ERR, USER1,
						"Could not copy buffer in destination\n");
					res = -1;
					goto end;
				}

				/*
				 * rte_pktmbuf_read() only copied into
				 * output_data_ptr if the mbuf data was not
				 * contiguous; otherwise copy it explicitly.
				 */
				if (read_data_addr != output_data_ptr)
					rte_memcpy(output_data_ptr,
						rte_pktmbuf_mtod(op->m_dst,
								uint8_t *),
						op->produced);
				output_data_ptr += op->produced;
				output_size += op->produced;
			}
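			/*
			 * On the final iteration, shrink each destination
			 * mbuf chain so pkt_len/data_len cover exactly the
			 * bytes the PMD produced; the next stage (e.g. the
			 * decompress pass reading the compressed mbufs back
			 * as input) then sees correctly sized data.
			 */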
			if (iter == num_iter - 1) {
				for (i = 0; i < num_deq; i++) {
					struct rte_comp_op *op = deq_ops[i];
					struct rte_mbuf *m = op->m_dst;

					m->pkt_len = op->produced;
					uint32_t remaining_data = op->produced;
					uint16_t data_to_append;

					while (remaining_data > 0) {
						data_to_append =
							RTE_MIN(remaining_data,
								out_seg_sz);
						m->data_len = data_to_append;
						remaining_data -=
								data_to_append;
						m = m->next;
					}
				}
			}
			rte_mempool_put_bulk(mem->op_pool,
					     (void **)deq_ops, num_deq);
			allocated -= num_deq;
		}

		/* Dequeue the last operations */
		while (total_deq_ops < total_ops) {
			if (unlikely(test_data->perf_comp_force_stop))
				goto end;

			num_deq = rte_compressdev_dequeue_burst(dev_id,
							mem->qp_id,
							deq_ops,
							test_data->burst_sz);
			if (num_deq == 0) {
				struct rte_compressdev_stats stats;

				rte_compressdev_stats_get(dev_id, &stats);
				if (stats.dequeue_err_count) {
					res = -1;
					goto end;
				}
			}

			total_deq_ops += num_deq;

			for (i = 0; i < num_deq; i++) {
				struct rte_comp_op *op = deq_ops[i];

				if (op->status ==
				    RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED ||
				    op->status ==
				    RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {
					RTE_LOG(ERR, USER1,
						"Out of space error occurred due to "
						"incompressible input data expanding to "
						"larger than destination buffer. Increase "
						"the EXPANSE_RATIO constant to use this data.\n");
					res = -1;
					goto end;
				} else if (op->status !=
					   RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
						"Some operations were not successful\n");
					res = -1;
					goto end;
				}

				const void *read_data_addr =
					rte_pktmbuf_read(op->m_dst,
						op->dst.offset,
						op->produced, output_data_ptr);
				if (read_data_addr == NULL) {
					RTE_LOG(ERR, USER1,
						"Could not copy buffer in destination\n");
					res = -1;
					goto end;
				}

				if (read_data_addr != output_data_ptr)
					rte_memcpy(output_data_ptr,
						rte_pktmbuf_mtod(op->m_dst,
								uint8_t *),
						op->produced);
				output_data_ptr += op->produced;
				output_size += op->produced;
			}

			/* Same pkt_len/data_len fixup as in the main loop */
			if (iter == num_iter - 1) {
				for (i = 0; i < num_deq; i++) {
					struct rte_comp_op *op = deq_ops[i];
					struct rte_mbuf *m = op->m_dst;

					m->pkt_len = op->produced;
					uint32_t remaining_data = op->produced;
					uint16_t data_to_append;

					while (remaining_data > 0) {
						data_to_append =
							RTE_MIN(remaining_data,
								out_seg_sz);
						m->data_len = data_to_append;
						remaining_data -=
								data_to_append;
						m = m->next;
					}
				}
			}
			rte_mempool_put_bulk(mem->op_pool,
					     (void **)deq_ops, num_deq);
			allocated -= num_deq;
		}
	}

	if (output_data_sz)
		*output_data_sz = output_size;
end:
	rte_mempool_put_bulk(mem->op_pool, (void **)ops, allocated);
	rte_compressdev_private_xform_free(dev_id, priv_xform);
	rte_free(ops);

	if (test_data->perf_comp_force_stop) {
		RTE_LOG(ERR, USER1,
			"lcore: %d Perf. test has been aborted by user\n",
			mem->lcore_id);
		res = -1;
	}

	return res;
}
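/*
 * Per-lcore entry point of the verify test: run main_loop() once in each
 * direction, check that the decompressed output matches the original
 * input byte-for-byte, and report the achieved compression ratio.
 */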
int
cperf_verify_test_runner(void *test_ctx)
{
	struct cperf_verify_ctx *ctx = test_ctx;
	struct comp_test_data *test_data = ctx->options;
	int ret = EXIT_SUCCESS;
	static rte_atomic16_t display_once = RTE_ATOMIC16_INIT(0);
	uint32_t lcore = rte_lcore_id();

	ctx->mem.lcore_id = lcore;

	test_data->ratio = 0;

	if (main_loop(ctx, RTE_COMP_COMPRESS) < 0) {
		ret = EXIT_FAILURE;
		goto end;
	}

	if (main_loop(ctx, RTE_COMP_DECOMPRESS) < 0) {
		ret = EXIT_FAILURE;
		goto end;
	}

	if (ctx->decomp_data_sz != test_data->input_data_sz) {
		RTE_LOG(ERR, USER1,
			"Decompressed data length not equal to input data length\n");
		RTE_LOG(ERR, USER1,
			"Decompressed size = %zu, expected = %zu\n",
			ctx->decomp_data_sz, test_data->input_data_sz);
		ret = EXIT_FAILURE;
		goto end;
	} else {
		if (memcmp(ctx->mem.decompressed_data,
				test_data->input_data,
				test_data->input_data_sz) != 0) {
			RTE_LOG(ERR, USER1,
				"Decompressed data is not the same as file data\n");
			ret = EXIT_FAILURE;
			goto end;
		}
	}

	ctx->ratio = (double) ctx->comp_data_sz /
			test_data->input_data_sz * 100;

	if (!ctx->silent) {
		if (rte_atomic16_test_and_set(&display_once)) {
			printf("%12s%6s%12s%17s\n",
			       "lcore id", "Level", "Comp size", "Comp ratio [%]");
		}
		printf("%12u%6u%12zu%17.2f\n",
		       ctx->mem.lcore_id,
		       test_data->level, ctx->comp_data_sz, ctx->ratio);
	}

end:
	return ret;
}