/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Intel Corporation
 */

#include <stdlib.h>

#include <rte_malloc.h>
#include <rte_eal.h>
#include <rte_log.h>
#include <rte_compressdev.h>

#include "comp_perf_test_verify.h"
#include "comp_perf_test_common.h"

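/* Release a verify test context: free the test buffers owned by the context
 * and then the context itself. Safe to call with a NULL arg.
 */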
void
cperf_verify_test_destructor(void *arg)
{
	if (arg) {
		comp_perf_free_memory(
				((struct cperf_verify_ctx *)arg)->options,
				&((struct cperf_verify_ctx *)arg)->mem);
		rte_free(arg);
	}
}

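/* Allocate a verify test context for one device/queue pair and prepare the
 * input/output buffers. Returns NULL (after cleaning up) on any failure.
 */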
void *
cperf_verify_test_constructor(uint8_t dev_id, uint16_t qp_id,
		struct comp_test_data *options)
{
	struct cperf_verify_ctx *ctx = NULL;

	ctx = rte_malloc(NULL, sizeof(struct cperf_verify_ctx), 0);

	if (ctx == NULL)
		return NULL;

	ctx->mem.dev_id = dev_id;
	ctx->mem.qp_id = qp_id;
	ctx->options = options;

	if (!comp_perf_allocate_memory(ctx->options, &ctx->mem) &&
			!prepare_bufs(ctx->options, &ctx->mem))
		return ctx;

	cperf_verify_test_destructor(ctx);
	return NULL;
}

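/* Run a single compression or decompression pass over all test buffers.
 * Produced output is copied into the context's flat output buffer so that
 * the runner can later compare it against the original input.
 */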
static int
main_loop(struct cperf_verify_ctx *ctx, enum rte_comp_xform_type type)
{
	struct comp_test_data *test_data = ctx->options;
	uint8_t *output_data_ptr = NULL;
	size_t *output_data_sz = NULL;
	struct cperf_mem_resources *mem = &ctx->mem;

	uint8_t dev_id = mem->dev_id;
	uint32_t i, iter, num_iter;
	struct rte_comp_op **ops, **deq_ops;
	void *priv_xform = NULL;
	struct rte_comp_xform xform;
	size_t output_size = 0;
	struct rte_mbuf **input_bufs, **output_bufs;
	int res = 0;
	int allocated = 0;
	uint32_t out_seg_sz;

	if (test_data == NULL || !test_data->burst_sz) {
		RTE_LOG(ERR, USER1,
			"Invalid test data or unknown burst size\n");
		return -1;
	}

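	/* A single allocation holds both op arrays: the first total_bufs
	 * entries are used for enqueue, the second half (deq_ops) for dequeue.
	 */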
	ops = rte_zmalloc_socket(NULL,
		2 * mem->total_bufs * sizeof(struct rte_comp_op *),
		0, rte_socket_id());

	if (ops == NULL) {
		RTE_LOG(ERR, USER1,
			"Can't allocate memory for ops structures\n");
		return -1;
	}

	deq_ops = &ops[mem->total_bufs];

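	/* Select the xform, the source/destination mbuf arrays and the flat
	 * output area according to the requested direction.
	 */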
	if (type == RTE_COMP_COMPRESS) {
		xform = (struct rte_comp_xform) {
			.type = RTE_COMP_COMPRESS,
			.compress = {
				.algo = RTE_COMP_ALGO_DEFLATE,
				.deflate.huffman = test_data->huffman_enc,
				.level = test_data->level,
				.window_size = test_data->window_sz,
				.chksum = RTE_COMP_CHECKSUM_NONE,
				.hash_algo = RTE_COMP_HASH_ALGO_NONE
			}
		};
		output_data_ptr = ctx->mem.compressed_data;
		output_data_sz = &ctx->comp_data_sz;
		input_bufs = mem->decomp_bufs;
		output_bufs = mem->comp_bufs;
		out_seg_sz = test_data->out_seg_sz;
	} else {
		xform = (struct rte_comp_xform) {
			.type = RTE_COMP_DECOMPRESS,
			.decompress = {
				.algo = RTE_COMP_ALGO_DEFLATE,
				.chksum = RTE_COMP_CHECKSUM_NONE,
				.window_size = test_data->window_sz,
				.hash_algo = RTE_COMP_HASH_ALGO_NONE
			}
		};
		output_data_ptr = ctx->mem.decompressed_data;
		output_data_sz = &ctx->decomp_data_sz;
		input_bufs = mem->comp_bufs;
		output_bufs = mem->decomp_bufs;
		out_seg_sz = test_data->seg_sz;
	}

	/* Create private xform */
	if (rte_compressdev_private_xform_create(dev_id, &xform,
			&priv_xform) < 0) {
		RTE_LOG(ERR, USER1, "Private xform could not be created\n");
		res = -1;
		goto end;
	}

	num_iter = 1;

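	/* The verify test makes a single pass over the data (num_iter == 1). */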
	for (iter = 0; iter < num_iter; iter++) {
		uint32_t total_ops = mem->total_bufs;
		uint32_t remaining_ops = mem->total_bufs;
		uint32_t total_deq_ops = 0;
		uint32_t total_enq_ops = 0;
		uint16_t ops_unused = 0;
		uint16_t num_enq = 0;
		uint16_t num_deq = 0;

		output_size = 0;

		while (remaining_ops > 0) {
			uint16_t num_ops = RTE_MIN(remaining_ops,
						   test_data->burst_sz);
			uint16_t ops_needed = num_ops - ops_unused;

			/*
			 * Move the unused operations from the previous
			 * enqueue_burst call to the front, to maintain order
			 */
			if ((ops_unused > 0) && (num_enq > 0)) {
				size_t nb_b_to_mov =
				      ops_unused * sizeof(struct rte_comp_op *);

				memmove(ops, &ops[num_enq], nb_b_to_mov);
			}

			/* Allocate compression operations */
			if (ops_needed && !rte_comp_op_bulk_alloc(
						mem->op_pool,
						&ops[ops_unused],
						ops_needed)) {
				RTE_LOG(ERR, USER1,
				      "Could not allocate enough operations\n");
				res = -1;
				goto end;
			}
			allocated += ops_needed;

			for (i = 0; i < ops_needed; i++) {
				/*
				 * Calculate next buffer to attach to operation
				 */
				uint32_t buf_id = total_enq_ops + i +
						ops_unused;
				uint16_t op_id = ops_unused + i;
				/* Reset all data in output buffers */
				struct rte_mbuf *m = output_bufs[buf_id];

				m->pkt_len = out_seg_sz * m->nb_segs;
				while (m) {
					m->data_len = m->buf_len - m->data_off;
					m = m->next;
				}
				ops[op_id]->m_src = input_bufs[buf_id];
				ops[op_id]->m_dst = output_bufs[buf_id];
				ops[op_id]->src.offset = 0;
				ops[op_id]->src.length =
					rte_pktmbuf_pkt_len(input_bufs[buf_id]);
				ops[op_id]->dst.offset = 0;
				ops[op_id]->flush_flag = RTE_COMP_FLUSH_FINAL;
				ops[op_id]->input_chksum = buf_id;
				ops[op_id]->private_xform = priv_xform;
			}

			if (unlikely(test_data->perf_comp_force_stop))
				goto end;

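			/* Enqueue the burst; ops the PMD did not accept are
			 * counted in ops_unused and retried on the next loop
			 * iteration (see the memmove above).
			 */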
			num_enq = rte_compressdev_enqueue_burst(dev_id,
								mem->qp_id, ops,
								num_ops);
			if (num_enq == 0) {
				struct rte_compressdev_stats stats;

				rte_compressdev_stats_get(dev_id, &stats);
				if (stats.enqueue_err_count) {
					res = -1;
					goto end;
				}
			}

			ops_unused = num_ops - num_enq;
			remaining_ops -= num_enq;
			total_enq_ops += num_enq;

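			/* Dequeue whatever has completed so far; remaining
			 * completions are drained after the enqueue loop.
			 */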
			num_deq = rte_compressdev_dequeue_burst(dev_id,
							   mem->qp_id,
							   deq_ops,
							   test_data->burst_sz);
			total_deq_ops += num_deq;

			for (i = 0; i < num_deq; i++) {
				struct rte_comp_op *op = deq_ops[i];

				if (op->status ==
				  RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED ||
				  op->status ==
				  RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {
					RTE_LOG(ERR, USER1,
"Out of space error occurred due to incompressible input data expanding to larger than the destination buffer. Increase the EXPANSE_RATIO constant to use this data.\n");
					res = -1;
					goto end;
				} else if (op->status !=
						RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
						"Some operations were not successful\n");
					res = -1;
					goto end;
				}

				const void *read_data_addr =
						rte_pktmbuf_read(op->m_dst, 0,
						op->produced, output_data_ptr);
				if (read_data_addr == NULL) {
					RTE_LOG(ERR, USER1,
						"Could not copy buffer in destination\n");
					res = -1;
					goto end;
				}

				if (read_data_addr != output_data_ptr)
					rte_memcpy(output_data_ptr,
						   rte_pktmbuf_mtod(op->m_dst,
								    uint8_t *),
						   op->produced);
				output_data_ptr += op->produced;
				output_size += op->produced;

			}

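			/* On the last iteration, resize the destination mbuf
			 * chain so pkt_len and data_len reflect the bytes
			 * actually produced; for the compression pass these
			 * mbufs are reused as input to the decompression pass.
			 */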
			if (iter == num_iter - 1) {
				for (i = 0; i < num_deq; i++) {
					struct rte_comp_op *op = deq_ops[i];
					struct rte_mbuf *m = op->m_dst;

					m->pkt_len = op->produced;
					uint32_t remaining_data = op->produced;
					uint16_t data_to_append;

					while (remaining_data > 0) {
						data_to_append =
							RTE_MIN(remaining_data,
							out_seg_sz);
						m->data_len = data_to_append;
						remaining_data -=
								data_to_append;
						m = m->next;
					}
				}
			}
			rte_mempool_put_bulk(mem->op_pool,
					     (void **)deq_ops, num_deq);
			allocated -= num_deq;
		}

		/* Dequeue the last operations */
		while (total_deq_ops < total_ops) {
			if (unlikely(test_data->perf_comp_force_stop))
				goto end;

			num_deq = rte_compressdev_dequeue_burst(dev_id,
							mem->qp_id,
							deq_ops,
							test_data->burst_sz);
			if (num_deq == 0) {
				struct rte_compressdev_stats stats;

				rte_compressdev_stats_get(dev_id, &stats);
				if (stats.dequeue_err_count) {
					res = -1;
					goto end;
				}
			}

			total_deq_ops += num_deq;

			for (i = 0; i < num_deq; i++) {
				struct rte_comp_op *op = deq_ops[i];

				if (op->status ==
				  RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED ||
				  op->status ==
				  RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {
					RTE_LOG(ERR, USER1,
"Out of space error occurred due to incompressible input data expanding to larger than the destination buffer. Increase the EXPANSE_RATIO constant to use this data.\n");
					res = -1;
					goto end;
				} else if (op->status !=
						RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
						"Some operations were not successful\n");
					res = -1;
					goto end;
				}
				const void *read_data_addr =
						rte_pktmbuf_read(op->m_dst,
								 op->dst.offset,
						op->produced, output_data_ptr);
				if (read_data_addr == NULL) {
					RTE_LOG(ERR, USER1,
						"Could not copy buffer in destination\n");
					res = -1;
					goto end;
				}

				if (read_data_addr != output_data_ptr)
					rte_memcpy(output_data_ptr,
						   rte_pktmbuf_mtod(
							op->m_dst, uint8_t *),
						   op->produced);
				output_data_ptr += op->produced;
				output_size += op->produced;

			}

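			/* Same destination mbuf chain fixup as in the main
			 * enqueue/dequeue loop above.
			 */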
			if (iter == num_iter - 1) {
				for (i = 0; i < num_deq; i++) {
					struct rte_comp_op *op = deq_ops[i];
					struct rte_mbuf *m = op->m_dst;

					m->pkt_len = op->produced;
					uint32_t remaining_data = op->produced;
					uint16_t data_to_append;

					while (remaining_data > 0) {
						data_to_append =
						RTE_MIN(remaining_data,
							out_seg_sz);
						m->data_len = data_to_append;
						remaining_data -=
								data_to_append;
						m = m->next;
					}
				}
			}
			rte_mempool_put_bulk(mem->op_pool,
					     (void **)deq_ops, num_deq);
			allocated -= num_deq;
		}
	}

	if (output_data_sz)
		*output_data_sz = output_size;
end:
	rte_mempool_put_bulk(mem->op_pool, (void **)ops, allocated);
	rte_compressdev_private_xform_free(dev_id, priv_xform);
	rte_free(ops);

	if (test_data->perf_comp_force_stop) {
		RTE_LOG(ERR, USER1,
		      "lcore: %d Perf. test has been aborted by user\n",
			mem->lcore_id);
		res = -1;
	}

	return res;
}

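/* Verify test entry point for one worker lcore: compress the input data,
 * decompress the result, and check that the round-tripped data matches the
 * original input before reporting the compression ratio.
 */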
int
cperf_verify_test_runner(void *test_ctx)
{
	struct cperf_verify_ctx *ctx = test_ctx;
	struct comp_test_data *test_data = ctx->options;
	int ret = EXIT_SUCCESS;
	static uint16_t display_once;
	uint32_t lcore = rte_lcore_id();

	ctx->mem.lcore_id = lcore;

	test_data->ratio = 0;

	if (main_loop(ctx, RTE_COMP_COMPRESS) < 0) {
		ret = EXIT_FAILURE;
		goto end;
	}

	if (main_loop(ctx, RTE_COMP_DECOMPRESS) < 0) {
		ret = EXIT_FAILURE;
		goto end;
	}

	if (ctx->decomp_data_sz != test_data->input_data_sz) {
		RTE_LOG(ERR, USER1,
	   "Decompressed data length not equal to input data length\n");
		RTE_LOG(ERR, USER1,
			"Decompressed size = %zu, expected = %zu\n",
			ctx->decomp_data_sz, test_data->input_data_sz);
		ret = EXIT_FAILURE;
		goto end;
	} else {
		if (memcmp(ctx->mem.decompressed_data,
				test_data->input_data,
				test_data->input_data_sz) != 0) {
			RTE_LOG(ERR, USER1,
		    "Decompressed data is not the same as file data\n");
			ret = EXIT_FAILURE;
			goto end;
		}
	}

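	/* Compression ratio reported as compressed size / input size, in %. */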
	ctx->ratio = (double) ctx->comp_data_sz /
			test_data->input_data_sz * 100;

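	/* Print the column headers only once across all worker lcores. */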
	uint16_t exp = 0;
	if (!ctx->silent) {
		if (__atomic_compare_exchange_n(&display_once, &exp, 1, 0,
				__ATOMIC_RELAXED, __ATOMIC_RELAXED)) {
			printf("%12s%6s%12s%17s\n",
			    "lcore id", "Level", "Comp size", "Comp ratio [%]");
		}
		printf("%12u%6u%12zu%17.2f\n",
		       ctx->mem.lcore_id,
		       test_data->level, ctx->comp_data_sz, ctx->ratio);
	}

end:
	return ret;
}