/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Intel Corporation
 */

#include <rte_malloc.h>
#include <rte_eal.h>
#include <rte_log.h>
#include <rte_compressdev.h>

#include "comp_perf_test_verify.h"

static int
main_loop(struct comp_test_data *test_data, uint8_t level,
	enum rte_comp_xform_type type,
	uint8_t *output_data_ptr,
	size_t *output_data_sz)
{
	uint8_t dev_id = test_data->cdev_id;
	uint32_t i, iter, num_iter;
	struct rte_comp_op **ops, **deq_ops;
	void *priv_xform = NULL;
	struct rte_comp_xform xform;
	size_t output_size = 0;
	struct rte_mbuf **input_bufs, **output_bufs;
	int res = 0;
	int allocated = 0;
	uint32_t out_seg_sz;

	if (test_data == NULL || !test_data->burst_sz) {
		RTE_LOG(ERR, USER1,
			"Unknown burst size\n");
		return -1;
	}

	ops = rte_zmalloc_socket(NULL,
		2 * test_data->total_bufs * sizeof(struct rte_comp_op *),
		0, rte_socket_id());

	if (ops == NULL) {
		RTE_LOG(ERR, USER1,
			"Can't allocate memory for ops structures\n");
		return -1;
	}

	deq_ops = &ops[test_data->total_bufs];

	if (type == RTE_COMP_COMPRESS) {
		xform = (struct rte_comp_xform) {
			.type = RTE_COMP_COMPRESS,
			.compress = {
				.algo = RTE_COMP_ALGO_DEFLATE,
				.deflate.huffman = test_data->huffman_enc,
				.level = level,
				.window_size = test_data->window_sz,
				.chksum = RTE_COMP_CHECKSUM_NONE,
				.hash_algo = RTE_COMP_HASH_ALGO_NONE
			}
		};
		input_bufs = test_data->decomp_bufs;
		output_bufs = test_data->comp_bufs;
		out_seg_sz = test_data->out_seg_sz;
	} else {
		xform = (struct rte_comp_xform) {
			.type = RTE_COMP_DECOMPRESS,
			.decompress = {
				.algo = RTE_COMP_ALGO_DEFLATE,
				.chksum = RTE_COMP_CHECKSUM_NONE,
				.window_size = test_data->window_sz,
				.hash_algo = RTE_COMP_HASH_ALGO_NONE
			}
		};
		input_bufs = test_data->comp_bufs;
		output_bufs = test_data->decomp_bufs;
		out_seg_sz = test_data->seg_sz;
	}

	/* Create private xform */
	if (rte_compressdev_private_xform_create(dev_id, &xform,
			&priv_xform) < 0) {
		RTE_LOG(ERR, USER1, "Private xform could not be created\n");
		res = -1;
		goto end;
	}

	num_iter = 1;

	for (iter = 0; iter < num_iter; iter++) {
		uint32_t total_ops = test_data->total_bufs;
		uint32_t remaining_ops = test_data->total_bufs;
		uint32_t total_deq_ops = 0;
		uint32_t total_enq_ops = 0;
		uint16_t ops_unused = 0;
		uint16_t num_enq = 0;
		uint16_t num_deq = 0;

		output_size = 0;

		while (remaining_ops > 0) {
			uint16_t num_ops = RTE_MIN(remaining_ops,
						   test_data->burst_sz);
			uint16_t ops_needed = num_ops - ops_unused;

			/*
			 * Move the unused operations from the previous
			 * enqueue_burst call to the front, to maintain order
			 */
			if ((ops_unused > 0) && (num_enq > 0)) {
				size_t nb_b_to_mov =
				      ops_unused * sizeof(struct rte_comp_op *);

				memmove(ops, &ops[num_enq], nb_b_to_mov);
			}

			/* Allocate compression operations */
			if (ops_needed && !rte_comp_op_bulk_alloc(
						test_data->op_pool,
						&ops[ops_unused],
						ops_needed)) {
				RTE_LOG(ERR, USER1,
				      "Could not allocate enough operations\n");
				res = -1;
				goto end;
			}
			allocated += ops_needed;

			for (i = 0; i < ops_needed; i++) {
				/*
				 * Calculate next buffer to attach to operation
				 */
				uint32_t buf_id = total_enq_ops + i +
						ops_unused;
				uint16_t op_id = ops_unused + i;
				/* Reset all data in output buffers */
				struct rte_mbuf *m = output_bufs[buf_id];
				m->pkt_len = out_seg_sz * m->nb_segs;
				while (m) {
					m->data_len = m->buf_len - m->data_off;
					m = m->next;
				}
				ops[op_id]->m_src = input_bufs[buf_id];
				ops[op_id]->m_dst = output_bufs[buf_id];
				ops[op_id]->src.offset = 0;
				ops[op_id]->src.length =
					rte_pktmbuf_pkt_len(input_bufs[buf_id]);
				ops[op_id]->dst.offset = 0;
				ops[op_id]->flush_flag = RTE_COMP_FLUSH_FINAL;
				ops[op_id]->input_chksum = buf_id;
				ops[op_id]->private_xform = priv_xform;
			}

			num_enq = rte_compressdev_enqueue_burst(dev_id, 0, ops,
								num_ops);
			if (num_enq == 0) {
				struct rte_compressdev_stats stats;

				rte_compressdev_stats_get(dev_id, &stats);
				if (stats.enqueue_err_count) {
					res = -1;
					goto end;
				}
			}

			ops_unused = num_ops - num_enq;
			remaining_ops -= num_enq;
			total_enq_ops += num_enq;

			num_deq = rte_compressdev_dequeue_burst(dev_id, 0,
							   deq_ops,
							   test_data->burst_sz);
			total_deq_ops += num_deq;

			for (i = 0; i < num_deq; i++) {
				struct rte_comp_op *op = deq_ops[i];

				if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
						"Some operations were not successful\n");
					res = -1;
					goto end;
				}

				const void *read_data_addr =
						rte_pktmbuf_read(op->m_dst, 0,
						op->produced, output_data_ptr);
				if (read_data_addr == NULL) {
					RTE_LOG(ERR, USER1,
						"Could not copy buffer in destination\n");
					res = -1;
					goto end;
				}

				if (read_data_addr != output_data_ptr)
					rte_memcpy(output_data_ptr,
						rte_pktmbuf_mtod(op->m_dst,
							uint8_t *),
						op->produced);
				output_data_ptr += op->produced;
				output_size += op->produced;
			}

			/* Trim output mbuf segment lengths to the produced data */
			if (iter == num_iter - 1) {
				for (i = 0; i < num_deq; i++) {
					struct rte_comp_op *op = deq_ops[i];
					struct rte_mbuf *m = op->m_dst;

					m->pkt_len = op->produced;
					uint32_t remaining_data = op->produced;
					uint16_t data_to_append;

					while (remaining_data > 0) {
						data_to_append =
							RTE_MIN(remaining_data,
								out_seg_sz);
						m->data_len = data_to_append;
						remaining_data -=
								data_to_append;
						m = m->next;
					}
				}
			}
			rte_mempool_put_bulk(test_data->op_pool,
					     (void **)deq_ops, num_deq);
			allocated -= num_deq;
		}

		/* Dequeue the last operations */
		while (total_deq_ops < total_ops) {
			num_deq = rte_compressdev_dequeue_burst(dev_id, 0,
						deq_ops, test_data->burst_sz);
			if (num_deq == 0) {
				struct rte_compressdev_stats stats;

				rte_compressdev_stats_get(dev_id, &stats);
				if (stats.dequeue_err_count) {
					res = -1;
					goto end;
				}
			}

			total_deq_ops += num_deq;

			for (i = 0; i < num_deq; i++) {
				struct rte_comp_op *op = deq_ops[i];

				if (op->status != RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
						"Some operations were not successful\n");
					res = -1;
					goto end;
				}

				const void *read_data_addr =
						rte_pktmbuf_read(op->m_dst,
						op->dst.offset,
						op->produced, output_data_ptr);
				if (read_data_addr == NULL) {
					RTE_LOG(ERR, USER1,
						"Could not copy buffer in destination\n");
					res = -1;
					goto end;
				}

				if (read_data_addr != output_data_ptr)
					rte_memcpy(output_data_ptr,
						rte_pktmbuf_mtod(
							op->m_dst, uint8_t *),
						op->produced);
				output_data_ptr += op->produced;
				output_size += op->produced;
			}

			/* Trim output mbuf segment lengths to the produced data */
			if (iter == num_iter - 1) {
				for (i = 0; i < num_deq; i++) {
					struct rte_comp_op *op = deq_ops[i];
					struct rte_mbuf *m = op->m_dst;

					m->pkt_len = op->produced;
					uint32_t remaining_data = op->produced;
					uint16_t data_to_append;

					while (remaining_data > 0) {
						data_to_append =
							RTE_MIN(remaining_data,
								out_seg_sz);
						m->data_len = data_to_append;
						remaining_data -=
								data_to_append;
						m = m->next;
					}
				}
			}
			rte_mempool_put_bulk(test_data->op_pool,
					     (void **)deq_ops, num_deq);
			allocated -= num_deq;
		}
	}

	if (output_data_sz)
		*output_data_sz = output_size;
end:
	rte_mempool_put_bulk(test_data->op_pool, (void **)ops, allocated);
	rte_compressdev_private_xform_free(dev_id, priv_xform);
	rte_free(ops);
	return res;
}

int
cperf_verification(struct comp_test_data *test_data, uint8_t level)
{
	int ret = EXIT_SUCCESS;

	test_data->ratio = 0;

	if (main_loop(test_data, level, RTE_COMP_COMPRESS,
		      test_data->compressed_data,
		      &test_data->comp_data_sz) < 0) {
		ret = EXIT_FAILURE;
		goto end;
	}

	if (main_loop(test_data, level, RTE_COMP_DECOMPRESS,
		      test_data->decompressed_data,
		      &test_data->decomp_data_sz) < 0) {
		ret = EXIT_FAILURE;
		goto end;
	}

	if (test_data->decomp_data_sz != test_data->input_data_sz) {
		RTE_LOG(ERR, USER1,
			"Decompressed data length not equal to input data length\n");
		RTE_LOG(ERR, USER1,
			"Decompressed size = %zu, expected = %zu\n",
			test_data->decomp_data_sz, test_data->input_data_sz);
		ret = EXIT_FAILURE;
		goto end;
	} else {
		if (memcmp(test_data->decompressed_data,
			   test_data->input_data,
			   test_data->input_data_sz) != 0) {
			RTE_LOG(ERR, USER1,
				"Decompressed data is not the same as file data\n");
			ret = EXIT_FAILURE;
			goto end;
		}
	}

	/* Compression ratio as a percentage of the original input size */
	test_data->ratio = (double) test_data->comp_data_sz /
			test_data->input_data_sz * 100;

end:
	return ret;
}