xref: /dpdk/app/test/test_compressdev.c (revision 68a03efeed657e6e05f281479b33b51102797e15)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2018 - 2019 Intel Corporation
3  */
4 #include <string.h>
5 #include <zlib.h>
6 #include <math.h>
7 #include <stdlib.h>
8 #include <unistd.h>
9 #include <stdio.h>
10 
11 #include <rte_cycles.h>
12 #include <rte_malloc.h>
13 #include <rte_mempool.h>
14 #include <rte_mbuf.h>
15 #include <rte_compressdev.h>
16 #include <rte_string_fns.h>
17 
18 #include "test_compressdev_test_buffer.h"
19 #include "test.h"
20 
21 #define DIV_CEIL(a, b)  ((a) / (b) + ((a) % (b) != 0))
22 
23 #define DEFAULT_WINDOW_SIZE 15
24 #define DEFAULT_MEM_LEVEL 8
25 #define MAX_DEQD_RETRIES 10
26 #define DEQUEUE_WAIT_TIME 10000
27 
28 /*
29  * 30% extra size for compressed data compared to original data,
30  * in case data size cannot be reduced and it is actually bigger
31  * due to the compress block headers
32  */
33 #define COMPRESS_BUF_SIZE_RATIO 1.3
34 #define COMPRESS_BUF_SIZE_RATIO_DISABLED 1.0
35 #define COMPRESS_BUF_SIZE_RATIO_OVERFLOW 0.2
36 #define NUM_LARGE_MBUFS 16
37 #define SMALL_SEG_SIZE 256
38 #define MAX_SEGS 16
39 #define NUM_OPS 16
40 #define NUM_MAX_XFORMS 16
41 #define NUM_MAX_INFLIGHT_OPS 128
42 #define CACHE_SIZE 0
43 
44 #define ZLIB_CRC_CHECKSUM_WINDOW_BITS 31
45 #define ZLIB_HEADER_SIZE 2
46 #define ZLIB_TRAILER_SIZE 4
47 #define GZIP_HEADER_SIZE 10
48 #define GZIP_TRAILER_SIZE 8
49 
50 #define OUT_OF_SPACE_BUF 1
51 
52 #define MAX_MBUF_SEGMENT_SIZE 65535
53 #define MAX_DATA_MBUF_SIZE (MAX_MBUF_SEGMENT_SIZE - RTE_PKTMBUF_HEADROOM)
54 #define NUM_BIG_MBUFS (512 + 1)
55 #define BIG_DATA_TEST_SIZE (MAX_DATA_MBUF_SIZE * 2)
56 
57 /* constants for "im buffer" tests start here */
58 
59 /* number of mbufs lower than number of inflight ops */
60 #define IM_BUF_NUM_MBUFS 3
61 /* above threshold (QAT_FALLBACK_THLD) and below max mbuf size */
62 #define IM_BUF_DATA_TEST_SIZE_LB 59600
63 /* data size smaller than the queue capacity */
64 #define IM_BUF_DATA_TEST_SIZE_SGL (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS)
65 /* number of mbufs bigger than number of inflight ops */
66 #define IM_BUF_NUM_MBUFS_OVER (NUM_MAX_INFLIGHT_OPS + 1)
67 /* data size bigger than the queue capacity */
68 #define IM_BUF_DATA_TEST_SIZE_OVER (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_OVER)
69 /* number of mid-size mbufs */
70 #define IM_BUF_NUM_MBUFS_MID ((NUM_MAX_INFLIGHT_OPS / 3) + 1)
71 /* capacity of mid-size mbufs */
72 #define IM_BUF_DATA_TEST_SIZE_MID (MAX_DATA_MBUF_SIZE * IM_BUF_NUM_MBUFS_MID)
73 
74 
/*
 * Human-readable names for each Huffman encoding type, indexed by the
 * rte_comp_huffman enum values (used when logging test progress/results).
 */
const char *
huffman_type_strings[] = {
	[RTE_COMP_HUFFMAN_DEFAULT]	= "PMD default",
	[RTE_COMP_HUFFMAN_FIXED]	= "Fixed",
	[RTE_COMP_HUFFMAN_DYNAMIC]	= "Dynamic"
};
81 
/*
 * Which direction(s) of an operation are performed by SW zlib (as a
 * reference implementation) instead of the compressdev PMD under test.
 */
enum zlib_direction {
	ZLIB_NONE,	/* PMD does both compress and decompress */
	ZLIB_COMPRESS,	/* zlib compresses, PMD decompresses */
	ZLIB_DECOMPRESS,	/* PMD compresses, zlib decompresses */
	ZLIB_ALL	/* zlib does both directions */
};

/* Buffer layout (linear vs chained/SGL mbufs) for input and output */
enum varied_buff {
	LB_BOTH = 0,	/* both input and output are linear*/
	SGL_BOTH,	/* both input and output are chained */
	SGL_TO_LB,	/* input buffer is chained */
	LB_TO_SGL	/* output buffer is chained */
};

/* Whether the destination-buffer overflow test mode is active */
enum overflow_test {
	OVERFLOW_DISABLED,
	OVERFLOW_ENABLED
};

/* Whether the COMPRESS_BUF_SIZE_RATIO margin is applied to output buffers */
enum ratio_switch {
	RATIO_DISABLED,
	RATIO_ENABLED
};

/* Direction of the operation whose buffers are being prepared */
enum operation_type {
	OPERATION_COMPRESSION,
	OPERATION_DECOMPRESSION
};
110 
/* Per-op private data: remembers which test vector an op originated from */
struct priv_op_data {
	uint16_t orig_idx;	/* index into the test-buffer array */
};

/* Testsuite-wide resources, created once in testsuite_setup() */
struct comp_testsuite_params {
	struct rte_mempool *large_mbuf_pool;	/* linear bufs, sized to fit any vector */
	struct rte_mempool *small_mbuf_pool;	/* small segments for SGL chains */
	struct rte_mempool *big_mbuf_pool;	/* max-size segments for big-data tests */
	struct rte_mempool *op_pool;		/* rte_comp_op pool */
	struct rte_comp_xform *def_comp_xform;	/* default compress transform */
	struct rte_comp_xform *def_decomp_xform;	/* default decompress transform */
};

/* Test vectors and transforms shared by one test run */
struct interim_data_params {
	const char * const *test_bufs;
	unsigned int num_bufs;
	uint16_t *buf_idx;
	struct rte_comp_xform **compress_xforms;
	struct rte_comp_xform **decompress_xforms;
	unsigned int num_xforms;
};

/* Per-test configuration: buffer layout, op statefulness, special modes */
struct test_data_params {
	enum rte_comp_op_type compress_state;
	enum rte_comp_op_type decompress_state;
	enum varied_buff buff_type;
	enum zlib_direction zlib_dir;
	unsigned int out_of_space;
	unsigned int big_data;
	/* stateful decompression specific parameters */
	unsigned int decompress_output_block_size;
	unsigned int decompress_steps_max;
	/* external mbufs specific parameters */
	unsigned int use_external_mbufs;
	unsigned int inbuf_data_size;
	const struct rte_memzone *inbuf_memzone;
	const struct rte_memzone *compbuf_memzone;
	const struct rte_memzone *uncompbuf_memzone;
	/* overflow test activation */
	enum overflow_test overflow;
	enum ratio_switch ratio;
};

/* Aggregates the per-buffer working arrays used throughout a test run */
struct test_private_arrays {
	struct rte_mbuf **uncomp_bufs;
	struct rte_mbuf **comp_bufs;
	struct rte_comp_op **ops;
	struct rte_comp_op **ops_processed;
	void **priv_xforms;
	uint64_t *compress_checksum;
	uint32_t *compressed_data_size;
	void **stream;
	char **all_decomp_data;
	unsigned int *decomp_produced_data_size;
	uint16_t num_priv_xforms;
};

static struct comp_testsuite_params testsuite_params = { 0 };
169 
170 
171 static void
172 testsuite_teardown(void)
173 {
174 	struct comp_testsuite_params *ts_params = &testsuite_params;
175 
176 	if (rte_mempool_in_use_count(ts_params->large_mbuf_pool))
177 		RTE_LOG(ERR, USER1, "Large mbuf pool still has unfreed bufs\n");
178 	if (rte_mempool_in_use_count(ts_params->small_mbuf_pool))
179 		RTE_LOG(ERR, USER1, "Small mbuf pool still has unfreed bufs\n");
180 	if (rte_mempool_in_use_count(ts_params->big_mbuf_pool))
181 		RTE_LOG(ERR, USER1, "Big mbuf pool still has unfreed bufs\n");
182 	if (rte_mempool_in_use_count(ts_params->op_pool))
183 		RTE_LOG(ERR, USER1, "op pool still has unfreed ops\n");
184 
185 	rte_mempool_free(ts_params->large_mbuf_pool);
186 	rte_mempool_free(ts_params->small_mbuf_pool);
187 	rte_mempool_free(ts_params->big_mbuf_pool);
188 	rte_mempool_free(ts_params->op_pool);
189 	rte_free(ts_params->def_comp_xform);
190 	rte_free(ts_params->def_decomp_xform);
191 }
192 
193 static int
194 testsuite_setup(void)
195 {
196 	struct comp_testsuite_params *ts_params = &testsuite_params;
197 	uint32_t max_buf_size = 0;
198 	unsigned int i;
199 
200 	if (rte_compressdev_count() == 0) {
201 		RTE_LOG(WARNING, USER1, "Need at least one compress device\n");
202 		return TEST_SKIPPED;
203 	}
204 
205 	RTE_LOG(NOTICE, USER1, "Running tests on device %s\n",
206 				rte_compressdev_name_get(0));
207 
208 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
209 		max_buf_size = RTE_MAX(max_buf_size,
210 				strlen(compress_test_bufs[i]) + 1);
211 
212 	/*
213 	 * Buffers to be used in compression and decompression.
214 	 * Since decompressed data might be larger than
215 	 * compressed data (due to block header),
216 	 * buffers should be big enough for both cases.
217 	 */
218 	max_buf_size *= COMPRESS_BUF_SIZE_RATIO;
219 	ts_params->large_mbuf_pool = rte_pktmbuf_pool_create("large_mbuf_pool",
220 			NUM_LARGE_MBUFS,
221 			CACHE_SIZE, 0,
222 			max_buf_size + RTE_PKTMBUF_HEADROOM,
223 			rte_socket_id());
224 	if (ts_params->large_mbuf_pool == NULL) {
225 		RTE_LOG(ERR, USER1, "Large mbuf pool could not be created\n");
226 		return TEST_FAILED;
227 	}
228 
229 	/* Create mempool with smaller buffers for SGL testing */
230 	ts_params->small_mbuf_pool = rte_pktmbuf_pool_create("small_mbuf_pool",
231 			NUM_LARGE_MBUFS * MAX_SEGS,
232 			CACHE_SIZE, 0,
233 			SMALL_SEG_SIZE + RTE_PKTMBUF_HEADROOM,
234 			rte_socket_id());
235 	if (ts_params->small_mbuf_pool == NULL) {
236 		RTE_LOG(ERR, USER1, "Small mbuf pool could not be created\n");
237 		goto exit;
238 	}
239 
240 	/* Create mempool with big buffers for SGL testing */
241 	ts_params->big_mbuf_pool = rte_pktmbuf_pool_create("big_mbuf_pool",
242 			NUM_BIG_MBUFS + 1,
243 			CACHE_SIZE, 0,
244 			MAX_MBUF_SEGMENT_SIZE,
245 			rte_socket_id());
246 	if (ts_params->big_mbuf_pool == NULL) {
247 		RTE_LOG(ERR, USER1, "Big mbuf pool could not be created\n");
248 		goto exit;
249 	}
250 
251 	ts_params->op_pool = rte_comp_op_pool_create("op_pool", NUM_OPS,
252 				0, sizeof(struct priv_op_data),
253 				rte_socket_id());
254 	if (ts_params->op_pool == NULL) {
255 		RTE_LOG(ERR, USER1, "Operation pool could not be created\n");
256 		goto exit;
257 	}
258 
259 	ts_params->def_comp_xform =
260 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
261 	if (ts_params->def_comp_xform == NULL) {
262 		RTE_LOG(ERR, USER1,
263 			"Default compress xform could not be created\n");
264 		goto exit;
265 	}
266 	ts_params->def_decomp_xform =
267 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
268 	if (ts_params->def_decomp_xform == NULL) {
269 		RTE_LOG(ERR, USER1,
270 			"Default decompress xform could not be created\n");
271 		goto exit;
272 	}
273 
274 	/* Initializes default values for compress/decompress xforms */
275 	ts_params->def_comp_xform->type = RTE_COMP_COMPRESS;
276 	ts_params->def_comp_xform->compress.algo = RTE_COMP_ALGO_DEFLATE,
277 	ts_params->def_comp_xform->compress.deflate.huffman =
278 						RTE_COMP_HUFFMAN_DEFAULT;
279 	ts_params->def_comp_xform->compress.level = RTE_COMP_LEVEL_PMD_DEFAULT;
280 	ts_params->def_comp_xform->compress.chksum = RTE_COMP_CHECKSUM_NONE;
281 	ts_params->def_comp_xform->compress.window_size = DEFAULT_WINDOW_SIZE;
282 
283 	ts_params->def_decomp_xform->type = RTE_COMP_DECOMPRESS;
284 	ts_params->def_decomp_xform->decompress.algo = RTE_COMP_ALGO_DEFLATE,
285 	ts_params->def_decomp_xform->decompress.chksum = RTE_COMP_CHECKSUM_NONE;
286 	ts_params->def_decomp_xform->decompress.window_size = DEFAULT_WINDOW_SIZE;
287 
288 	return TEST_SUCCESS;
289 
290 exit:
291 	testsuite_teardown();
292 
293 	return TEST_FAILED;
294 }
295 
296 static int
297 generic_ut_setup(void)
298 {
299 	/* Configure compressdev (one device, one queue pair) */
300 	struct rte_compressdev_config config = {
301 		.socket_id = rte_socket_id(),
302 		.nb_queue_pairs = 1,
303 		.max_nb_priv_xforms = NUM_MAX_XFORMS,
304 		.max_nb_streams = 1
305 	};
306 
307 	if (rte_compressdev_configure(0, &config) < 0) {
308 		RTE_LOG(ERR, USER1, "Device configuration failed\n");
309 		return -1;
310 	}
311 
312 	if (rte_compressdev_queue_pair_setup(0, 0, NUM_MAX_INFLIGHT_OPS,
313 			rte_socket_id()) < 0) {
314 		RTE_LOG(ERR, USER1, "Queue pair setup failed\n");
315 		return -1;
316 	}
317 
318 	if (rte_compressdev_start(0) < 0) {
319 		RTE_LOG(ERR, USER1, "Device could not be started\n");
320 		return -1;
321 	}
322 
323 	return 0;
324 }
325 
326 static void
327 generic_ut_teardown(void)
328 {
329 	rte_compressdev_stop(0);
330 	if (rte_compressdev_close(0) < 0)
331 		RTE_LOG(ERR, USER1, "Device could not be closed\n");
332 }
333 
334 static int
335 test_compressdev_invalid_configuration(void)
336 {
337 	struct rte_compressdev_config invalid_config;
338 	struct rte_compressdev_config valid_config = {
339 		.socket_id = rte_socket_id(),
340 		.nb_queue_pairs = 1,
341 		.max_nb_priv_xforms = NUM_MAX_XFORMS,
342 		.max_nb_streams = 1
343 	};
344 	struct rte_compressdev_info dev_info;
345 
346 	RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
347 
348 	/* Invalid configuration with 0 queue pairs */
349 	memcpy(&invalid_config, &valid_config,
350 			sizeof(struct rte_compressdev_config));
351 	invalid_config.nb_queue_pairs = 0;
352 
353 	TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
354 			"Device configuration was successful "
355 			"with no queue pairs (invalid)\n");
356 
357 	/*
358 	 * Invalid configuration with too many queue pairs
359 	 * (if there is an actual maximum number of queue pairs)
360 	 */
361 	rte_compressdev_info_get(0, &dev_info);
362 	if (dev_info.max_nb_queue_pairs != 0) {
363 		memcpy(&invalid_config, &valid_config,
364 			sizeof(struct rte_compressdev_config));
365 		invalid_config.nb_queue_pairs = dev_info.max_nb_queue_pairs + 1;
366 
367 		TEST_ASSERT_FAIL(rte_compressdev_configure(0, &invalid_config),
368 				"Device configuration was successful "
369 				"with too many queue pairs (invalid)\n");
370 	}
371 
372 	/* Invalid queue pair setup, with no number of queue pairs set */
373 	TEST_ASSERT_FAIL(rte_compressdev_queue_pair_setup(0, 0,
374 				NUM_MAX_INFLIGHT_OPS, rte_socket_id()),
375 			"Queue pair setup was successful "
376 			"with no queue pairs set (invalid)\n");
377 
378 	return TEST_SUCCESS;
379 }
380 
381 static int
382 compare_buffers(const char *buffer1, uint32_t buffer1_len,
383 		const char *buffer2, uint32_t buffer2_len)
384 {
385 	if (buffer1_len != buffer2_len) {
386 		RTE_LOG(ERR, USER1, "Buffer lengths are different\n");
387 		return -1;
388 	}
389 
390 	if (memcmp(buffer1, buffer2, buffer1_len) != 0) {
391 		RTE_LOG(ERR, USER1, "Buffers are different\n");
392 		return -1;
393 	}
394 
395 	return 0;
396 }
397 
398 /*
399  * Maps compressdev and Zlib flush flags
400  */
401 static int
402 map_zlib_flush_flag(enum rte_comp_flush_flag flag)
403 {
404 	switch (flag) {
405 	case RTE_COMP_FLUSH_NONE:
406 		return Z_NO_FLUSH;
407 	case RTE_COMP_FLUSH_SYNC:
408 		return Z_SYNC_FLUSH;
409 	case RTE_COMP_FLUSH_FULL:
410 		return Z_FULL_FLUSH;
411 	case RTE_COMP_FLUSH_FINAL:
412 		return Z_FINISH;
413 	/*
414 	 * There should be only the values above,
415 	 * so this should never happen
416 	 */
417 	default:
418 		return -1;
419 	}
420 }
421 
/*
 * Compresses op->m_src into op->m_dst with SW zlib, as a reference
 * implementation for the PMD under test.  Supports linear and chained
 * (SGL) mbufs on both sides by bouncing through contiguous temporary
 * buffers.  Assumes a stateless, single-shot operation.  On success,
 * fills op->consumed/produced/status/output_chksum and returns 0;
 * returns non-zero otherwise.
 */
static int
compress_zlib(struct rte_comp_op *op,
		const struct rte_comp_xform *xform, int mem_level)
{
	z_stream stream;
	int zlib_flush;
	int strategy, window_bits, comp_level;
	int ret = TEST_FAILED;
	uint8_t *single_src_buf = NULL;	/* bounce buffer for SGL input */
	uint8_t *single_dst_buf = NULL;	/* bounce buffer for SGL output */

	/* initialize zlib stream */
	stream.zalloc = Z_NULL;
	stream.zfree = Z_NULL;
	stream.opaque = Z_NULL;

	if (xform->compress.deflate.huffman == RTE_COMP_HUFFMAN_FIXED)
		strategy = Z_FIXED;
	else
		strategy = Z_DEFAULT_STRATEGY;

	/*
	 * Window bits is the base two logarithm of the window size (in bytes).
	 * When doing raw DEFLATE, this number will be negative.
	 */
	window_bits = -(xform->compress.window_size);
	if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32)
		/* positive windowBits selects zlib format (adler32 checksum) */
		window_bits *= -1;
	else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32)
		/* windowBits of 16+N selects gzip format (crc32 checksum) */
		window_bits = ZLIB_CRC_CHECKSUM_WINDOW_BITS;

	comp_level = xform->compress.level;

	if (comp_level != RTE_COMP_LEVEL_NONE)
		ret = deflateInit2(&stream, comp_level, Z_DEFLATED,
			window_bits, mem_level, strategy);
	else
		ret = deflateInit(&stream, Z_NO_COMPRESSION);

	if (ret != Z_OK) {
		printf("Zlib deflate could not be initialized\n");
		goto exit;
	}

	/* Assuming stateless operation */
	/* SGL Input */
	if (op->m_src->nb_segs > 1) {
		/* flatten the chained source into one contiguous buffer */
		single_src_buf = rte_malloc(NULL,
				rte_pktmbuf_pkt_len(op->m_src), 0);
		if (single_src_buf == NULL) {
			RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
			goto exit;
		}

		if (rte_pktmbuf_read(op->m_src, op->src.offset,
					rte_pktmbuf_pkt_len(op->m_src) -
					op->src.offset,
					single_src_buf) == NULL) {
			RTE_LOG(ERR, USER1,
				"Buffer could not be read entirely\n");
			goto exit;
		}

		stream.avail_in = op->src.length;
		stream.next_in = single_src_buf;

	} else {
		stream.avail_in = op->src.length;
		stream.next_in = rte_pktmbuf_mtod_offset(op->m_src, uint8_t *,
				op->src.offset);
	}
	/* SGL output */
	if (op->m_dst->nb_segs > 1) {

		/* compress into a bounce buffer; scattered to the SGL below */
		single_dst_buf = rte_malloc(NULL,
				rte_pktmbuf_pkt_len(op->m_dst), 0);
			if (single_dst_buf == NULL) {
				RTE_LOG(ERR, USER1,
					"Buffer could not be allocated\n");
			goto exit;
		}

		stream.avail_out = op->m_dst->pkt_len;
		stream.next_out = single_dst_buf;

	} else {/* linear output */
		stream.avail_out = op->m_dst->data_len;
		stream.next_out = rte_pktmbuf_mtod_offset(op->m_dst, uint8_t *,
				op->dst.offset);
	}

	/* Stateless operation, all buffer will be compressed in one go */
	zlib_flush = map_zlib_flush_flag(op->flush_flag);
	ret = deflate(&stream, zlib_flush);

	if (stream.avail_in != 0) {
		RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
		goto exit;
	}

	if (ret != Z_STREAM_END)
		goto exit;

	/* Copy data to destination SGL */
	if (op->m_dst->nb_segs > 1) {
		uint32_t remaining_data = stream.total_out;
		uint8_t *src_data = single_dst_buf;
		struct rte_mbuf *dst_buf = op->m_dst;

		while (remaining_data > 0) {
			/*
			 * NOTE(review): op->dst.offset is applied to every
			 * segment here, not just the first one — confirm this
			 * matches how the test builds its destination chains.
			 */
			uint8_t *dst_data = rte_pktmbuf_mtod_offset(dst_buf,
						uint8_t *, op->dst.offset);
			/* Last segment */
			if (remaining_data < dst_buf->data_len) {
				memcpy(dst_data, src_data, remaining_data);
				remaining_data = 0;
			} else {
				memcpy(dst_data, src_data, dst_buf->data_len);
				remaining_data -= dst_buf->data_len;
				src_data += dst_buf->data_len;
				dst_buf = dst_buf->next;
			}
		}
	}

	op->consumed = stream.total_in;
	if (xform->compress.chksum == RTE_COMP_CHECKSUM_ADLER32) {
		/* strip zlib header/trailer so only raw deflate data remains */
		rte_pktmbuf_adj(op->m_dst, ZLIB_HEADER_SIZE);
		rte_pktmbuf_trim(op->m_dst, ZLIB_TRAILER_SIZE);
		op->produced = stream.total_out - (ZLIB_HEADER_SIZE +
				ZLIB_TRAILER_SIZE);
	} else if (xform->compress.chksum == RTE_COMP_CHECKSUM_CRC32) {
		/* strip gzip header/trailer so only raw deflate data remains */
		rte_pktmbuf_adj(op->m_dst, GZIP_HEADER_SIZE);
		rte_pktmbuf_trim(op->m_dst, GZIP_TRAILER_SIZE);
		op->produced = stream.total_out - (GZIP_HEADER_SIZE +
				GZIP_TRAILER_SIZE);
	} else
		op->produced = stream.total_out;

	op->status = RTE_COMP_OP_STATUS_SUCCESS;
	op->output_chksum = stream.adler;

	deflateReset(&stream);

	ret = 0;
exit:
	deflateEnd(&stream);
	rte_free(single_src_buf);
	rte_free(single_dst_buf);

	return ret;
}
574 
575 static int
576 decompress_zlib(struct rte_comp_op *op,
577 		const struct rte_comp_xform *xform)
578 {
579 	z_stream stream;
580 	int window_bits;
581 	int zlib_flush;
582 	int ret = TEST_FAILED;
583 	uint8_t *single_src_buf = NULL;
584 	uint8_t *single_dst_buf = NULL;
585 
586 	/* initialize zlib stream */
587 	stream.zalloc = Z_NULL;
588 	stream.zfree = Z_NULL;
589 	stream.opaque = Z_NULL;
590 
591 	/*
592 	 * Window bits is the base two logarithm of the window size (in bytes).
593 	 * When doing raw DEFLATE, this number will be negative.
594 	 */
595 	window_bits = -(xform->decompress.window_size);
596 	ret = inflateInit2(&stream, window_bits);
597 
598 	if (ret != Z_OK) {
599 		printf("Zlib deflate could not be initialized\n");
600 		goto exit;
601 	}
602 
603 	/* Assuming stateless operation */
604 	/* SGL */
605 	if (op->m_src->nb_segs > 1) {
606 		single_src_buf = rte_malloc(NULL,
607 				rte_pktmbuf_pkt_len(op->m_src), 0);
608 		if (single_src_buf == NULL) {
609 			RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
610 			goto exit;
611 		}
612 		single_dst_buf = rte_malloc(NULL,
613 				rte_pktmbuf_pkt_len(op->m_dst), 0);
614 		if (single_dst_buf == NULL) {
615 			RTE_LOG(ERR, USER1, "Buffer could not be allocated\n");
616 			goto exit;
617 		}
618 		if (rte_pktmbuf_read(op->m_src, 0,
619 					rte_pktmbuf_pkt_len(op->m_src),
620 					single_src_buf) == NULL) {
621 			RTE_LOG(ERR, USER1,
622 				"Buffer could not be read entirely\n");
623 			goto exit;
624 		}
625 
626 		stream.avail_in = op->src.length;
627 		stream.next_in = single_src_buf;
628 		stream.avail_out = rte_pktmbuf_pkt_len(op->m_dst);
629 		stream.next_out = single_dst_buf;
630 
631 	} else {
632 		stream.avail_in = op->src.length;
633 		stream.next_in = rte_pktmbuf_mtod(op->m_src, uint8_t *);
634 		stream.avail_out = op->m_dst->data_len;
635 		stream.next_out = rte_pktmbuf_mtod(op->m_dst, uint8_t *);
636 	}
637 
638 	/* Stateless operation, all buffer will be compressed in one go */
639 	zlib_flush = map_zlib_flush_flag(op->flush_flag);
640 	ret = inflate(&stream, zlib_flush);
641 
642 	if (stream.avail_in != 0) {
643 		RTE_LOG(ERR, USER1, "Buffer could not be read entirely\n");
644 		goto exit;
645 	}
646 
647 	if (ret != Z_STREAM_END)
648 		goto exit;
649 
650 	if (op->m_src->nb_segs > 1) {
651 		uint32_t remaining_data = stream.total_out;
652 		uint8_t *src_data = single_dst_buf;
653 		struct rte_mbuf *dst_buf = op->m_dst;
654 
655 		while (remaining_data > 0) {
656 			uint8_t *dst_data = rte_pktmbuf_mtod(dst_buf,
657 					uint8_t *);
658 			/* Last segment */
659 			if (remaining_data < dst_buf->data_len) {
660 				memcpy(dst_data, src_data, remaining_data);
661 				remaining_data = 0;
662 			} else {
663 				memcpy(dst_data, src_data, dst_buf->data_len);
664 				remaining_data -= dst_buf->data_len;
665 				src_data += dst_buf->data_len;
666 				dst_buf = dst_buf->next;
667 			}
668 		}
669 	}
670 
671 	op->consumed = stream.total_in;
672 	op->produced = stream.total_out;
673 	op->status = RTE_COMP_OP_STATUS_SUCCESS;
674 
675 	inflateReset(&stream);
676 
677 	ret = 0;
678 exit:
679 	inflateEnd(&stream);
680 
681 	return ret;
682 }
683 
/*
 * Builds a chained (SGL) mbuf of total_data_size bytes on top of head_buf,
 * allocating additional segments of seg_size bytes from the given mempools
 * and, when test_buf is non-NULL, copying its contents into the chain.
 *
 * @param test_buf
 *   Source data to copy into the chain, or NULL to only reserve space.
 * @param head_buf
 *   Already-allocated first segment of the chain (data is appended to it).
 * @param limit_segs_in_sgl
 *   Upper bound on the number of segments; 0 means no limit.
 * @param seg_size
 *   Capacity of each non-final segment.
 * @return
 *   0 on success, -1 on allocation or space failure.
 */
static int
prepare_sgl_bufs(const char *test_buf, struct rte_mbuf *head_buf,
		uint32_t total_data_size,
		struct rte_mempool *small_mbuf_pool,
		struct rte_mempool *large_mbuf_pool,
		uint8_t limit_segs_in_sgl,
		uint16_t seg_size)
{
	uint32_t remaining_data = total_data_size;
	uint16_t num_remaining_segs = DIV_CEIL(remaining_data, seg_size);
	struct rte_mempool *pool;
	struct rte_mbuf *next_seg;
	uint32_t data_size;
	char *buf_ptr;
	const char *data_ptr = test_buf;
	uint16_t i;
	int ret;

	/*
	 * NOTE(review): when the cap applies, the chain is limited to
	 * limit_segs_in_sgl - 1 extra segments and the last segment absorbs
	 * all remaining data (drawn from the large pool below) — confirm the
	 * off-by-one is intentional.
	 */
	if (limit_segs_in_sgl != 0 && num_remaining_segs > limit_segs_in_sgl)
		num_remaining_segs = limit_segs_in_sgl - 1;

	/*
	 * Allocate data in the first segment (header) and
	 * copy data if test buffer is provided
	 */
	if (remaining_data < seg_size)
		data_size = remaining_data;
	else
		data_size = seg_size;

	buf_ptr = rte_pktmbuf_append(head_buf, data_size);
	if (buf_ptr == NULL) {
		RTE_LOG(ERR, USER1,
			"Not enough space in the 1st buffer\n");
		return -1;
	}

	if (data_ptr != NULL) {
		/* Copy characters without NULL terminator */
		memcpy(buf_ptr, data_ptr, data_size);
		data_ptr += data_size;
	}
	remaining_data -= data_size;
	num_remaining_segs--;

	/*
	 * Allocate the rest of the segments,
	 * copy the rest of the data and chain the segments.
	 */
	for (i = 0; i < num_remaining_segs; i++) {

		if (i == (num_remaining_segs - 1)) {
			/* last segment */
			if (remaining_data > seg_size)
				pool = large_mbuf_pool;
			else
				pool = small_mbuf_pool;
			data_size = remaining_data;
		} else {
			data_size = seg_size;
			pool = small_mbuf_pool;
		}

		next_seg = rte_pktmbuf_alloc(pool);
		if (next_seg == NULL) {
			RTE_LOG(ERR, USER1,
				"New segment could not be allocated "
				"from the mempool\n");
			return -1;
		}
		buf_ptr = rte_pktmbuf_append(next_seg, data_size);
		if (buf_ptr == NULL) {
			RTE_LOG(ERR, USER1,
				"Not enough space in the buffer\n");
			rte_pktmbuf_free(next_seg);
			return -1;
		}
		if (data_ptr != NULL) {
			/* Copy characters without NULL terminator */
			memcpy(buf_ptr, data_ptr, data_size);
			data_ptr += data_size;
		}
		remaining_data -= data_size;

		ret = rte_pktmbuf_chain(head_buf, next_seg);
		if (ret != 0) {
			rte_pktmbuf_free(next_seg);
			RTE_LOG(ERR, USER1,
				"Segment could not chained\n");
			return -1;
		}
	}

	return 0;
}
779 
/*
 * No-op free callback for mbufs with attached external buffers: the
 * underlying memory (memzones supplied via test_data_params) is managed
 * by the caller, so nothing must be freed here.
 */
static void
extbuf_free_callback(void *addr __rte_unused, void *opaque __rte_unused)
{
}
784 
/*
 * Enqueues num_bufs ops on device 0 / queue pair 0, then dequeues the
 * same number of ops into ops_processed, retrying with a delay up to
 * MAX_DEQD_RETRIES times.  Dequeueing proceeds even after a partial
 * enqueue so that no ops are left in the device.
 *
 * @return 0 on success, -1 if not all ops could be enqueued or dequeued.
 */
static int
test_run_enqueue_dequeue(struct rte_comp_op **ops,
			 struct rte_comp_op **ops_processed,
			 unsigned int num_bufs)
{
	uint16_t num_enqd, num_deqd, num_total_deqd;
	unsigned int deqd_retries = 0;
	int res = 0;

	/* Enqueue and dequeue all operations */
	num_enqd = rte_compressdev_enqueue_burst(0, 0, ops, num_bufs);
	if (num_enqd < num_bufs) {
		RTE_LOG(ERR, USER1,
			"Some operations could not be enqueued\n");
		res = -1;
	}

	/* dequeue ops even on error (same number of ops as was enqueued) */

	num_total_deqd = 0;
	while (num_total_deqd < num_enqd) {
		/*
		 * If retrying a dequeue call, wait for 10 ms to allow
		 * enough time to the driver to process the operations
		 */
		if (deqd_retries != 0) {
			/*
			 * Avoid infinite loop if not all the
			 * operations get out of the device
			 */
			if (deqd_retries == MAX_DEQD_RETRIES) {
				RTE_LOG(ERR, USER1,
					"Not all operations could be dequeued\n");
				res = -1;
				break;
			}
			usleep(DEQUEUE_WAIT_TIME);
		}
		num_deqd = rte_compressdev_dequeue_burst(0, 0,
				&ops_processed[num_total_deqd], num_bufs);
		num_total_deqd += num_deqd;
		/* counts attempts, not failures: incremented every pass */
		deqd_retries++;

	}

	return res;
}
832 
833 /**
834  * Arrays initialization. Input buffers preparation for compression.
835  *
836  * API that initializes all the private arrays to NULL
837  * and allocates input buffers to perform compression operations.
838  *
839  * @param int_data
840  *   Interim data containing session/transformation objects.
841  * @param test_data
842  *   The test parameters set by users (command line parameters).
843  * @param test_priv_data
844  *   A container used for aggregation all the private test arrays.
845  * @return
846  *   - 0: On success.
847  *   - -1: On error.
848  */
849 static int
850 test_setup_com_bufs(const struct interim_data_params *int_data,
851 		const struct test_data_params *test_data,
852 		const struct test_private_arrays *test_priv_data)
853 {
854 	/* local variables: */
855 	unsigned int i;
856 	uint32_t data_size;
857 	char *buf_ptr;
858 	int ret;
859 	char **all_decomp_data = test_priv_data->all_decomp_data;
860 
861 	struct comp_testsuite_params *ts_params = &testsuite_params;
862 
863 	/* from int_data: */
864 	const char * const *test_bufs = int_data->test_bufs;
865 	unsigned int num_bufs = int_data->num_bufs;
866 
867 	/* from test_data: */
868 	unsigned int buff_type = test_data->buff_type;
869 	unsigned int big_data = test_data->big_data;
870 
871 	/* from test_priv_data: */
872 	struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
873 	struct rte_mempool *buf_pool;
874 
875 	static struct rte_mbuf_ext_shared_info inbuf_info;
876 
877 	size_t array_size = sizeof(void *) * num_bufs;
878 
879 	/* Initialize all arrays to NULL */
880 	memset(test_priv_data->uncomp_bufs, 0, array_size);
881 	memset(test_priv_data->comp_bufs, 0, array_size);
882 	memset(test_priv_data->ops, 0, array_size);
883 	memset(test_priv_data->ops_processed, 0, array_size);
884 	memset(test_priv_data->priv_xforms, 0, array_size);
885 	memset(test_priv_data->compressed_data_size,
886 	       0, sizeof(uint32_t) * num_bufs);
887 
888 	if (test_data->decompress_state == RTE_COMP_OP_STATEFUL) {
889 		data_size = strlen(test_bufs[0]) + 1;
890 		*all_decomp_data = rte_malloc(NULL, data_size,
891 					     RTE_CACHE_LINE_SIZE);
892 	}
893 
894 	if (big_data)
895 		buf_pool = ts_params->big_mbuf_pool;
896 	else if (buff_type == SGL_BOTH)
897 		buf_pool = ts_params->small_mbuf_pool;
898 	else
899 		buf_pool = ts_params->large_mbuf_pool;
900 
901 	/* for compression uncomp_bufs is used as a source buffer */
902 	/* allocation from buf_pool (mempool type) */
903 	ret = rte_pktmbuf_alloc_bulk(buf_pool,
904 				uncomp_bufs, num_bufs);
905 	if (ret < 0) {
906 		RTE_LOG(ERR, USER1,
907 			"Source mbufs could not be allocated "
908 			"from the mempool\n");
909 		return -1;
910 	}
911 
912 	if (test_data->use_external_mbufs) {
913 		inbuf_info.free_cb = extbuf_free_callback;
914 		inbuf_info.fcb_opaque = NULL;
915 		rte_mbuf_ext_refcnt_set(&inbuf_info, 1);
916 		for (i = 0; i < num_bufs; i++) {
917 			rte_pktmbuf_attach_extbuf(uncomp_bufs[i],
918 					test_data->inbuf_memzone->addr,
919 					test_data->inbuf_memzone->iova,
920 					test_data->inbuf_data_size,
921 					&inbuf_info);
922 			buf_ptr = rte_pktmbuf_append(uncomp_bufs[i],
923 					test_data->inbuf_data_size);
924 			if (buf_ptr == NULL) {
925 				RTE_LOG(ERR, USER1,
926 					"Append extra bytes to the source mbuf failed\n");
927 				return -1;
928 			}
929 		}
930 	} else if (buff_type == SGL_BOTH || buff_type == SGL_TO_LB) {
931 		for (i = 0; i < num_bufs; i++) {
932 			data_size = strlen(test_bufs[i]) + 1;
933 			if (prepare_sgl_bufs(test_bufs[i], uncomp_bufs[i],
934 			    data_size,
935 			    big_data ? buf_pool : ts_params->small_mbuf_pool,
936 			    big_data ? buf_pool : ts_params->large_mbuf_pool,
937 			    big_data ? 0 : MAX_SEGS,
938 			    big_data ? MAX_DATA_MBUF_SIZE : SMALL_SEG_SIZE) < 0)
939 				return -1;
940 		}
941 	} else {
942 		for (i = 0; i < num_bufs; i++) {
943 			data_size = strlen(test_bufs[i]) + 1;
944 
945 			buf_ptr = rte_pktmbuf_append(uncomp_bufs[i], data_size);
946 			if (buf_ptr == NULL) {
947 				RTE_LOG(ERR, USER1,
948 					"Append extra bytes to the source mbuf failed\n");
949 				return -1;
950 			}
951 			strlcpy(buf_ptr, test_bufs[i], data_size);
952 		}
953 	}
954 
955 	return 0;
956 }
957 
958 /**
959  * Data size calculation (for both compression and decompression).
960  *
961  * Calculate size of anticipated output buffer required for both
962  * compression and decompression operations based on input int_data.
963  *
964  * @param op_type
965  *   Operation type: compress or decompress
966  * @param out_of_space_and_zlib
967  *   Boolean value to switch into "out of space" buffer if set.
968  *   To test "out-of-space" data size, zlib_decompress must be set as well.
969  * @param test_priv_data
970  *   A container used for aggregation all the private test arrays.
971  * @param int_data
972  *   Interim data containing session/transformation objects.
973  * @param test_data
974  *   The test parameters set by users (command line parameters).
975  * @param i
976  *   current buffer index
977  * @return
978  *   data size
979  */
980 static inline uint32_t
981 test_mbufs_calculate_data_size(
982 		enum operation_type op_type,
983 		unsigned int out_of_space_and_zlib,
984 		const struct test_private_arrays *test_priv_data,
985 		const struct interim_data_params *int_data,
986 		const struct test_data_params *test_data,
987 		unsigned int i)
988 {
989 	/* local variables: */
990 	uint32_t data_size;
991 	struct priv_op_data *priv_data;
992 	float ratio_val;
993 	enum ratio_switch ratio = test_data->ratio;
994 
995 	uint8_t not_zlib_compr; /* true if zlib isn't current compression dev */
996 	enum overflow_test overflow = test_data->overflow;
997 
998 	/* from test_priv_data: */
999 	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1000 
1001 	/* from int_data: */
1002 	const char * const *test_bufs = int_data->test_bufs;
1003 
1004 	if (out_of_space_and_zlib)
1005 		data_size = OUT_OF_SPACE_BUF;
1006 	else {
1007 		if (op_type == OPERATION_COMPRESSION) {
1008 			not_zlib_compr = (test_data->zlib_dir == ZLIB_DECOMPRESS
1009 				|| test_data->zlib_dir == ZLIB_NONE);
1010 
1011 			ratio_val = (ratio == RATIO_ENABLED) ?
1012 					COMPRESS_BUF_SIZE_RATIO :
1013 					COMPRESS_BUF_SIZE_RATIO_DISABLED;
1014 
1015 			ratio_val = (not_zlib_compr &&
1016 				(overflow == OVERFLOW_ENABLED)) ?
1017 				COMPRESS_BUF_SIZE_RATIO_OVERFLOW :
1018 				ratio_val;
1019 
1020 			data_size = strlen(test_bufs[i]) * ratio_val;
1021 		} else {
1022 			priv_data = (struct priv_op_data *)
1023 					(ops_processed[i] + 1);
1024 			data_size = strlen(test_bufs[priv_data->orig_idx]) + 1;
1025 		}
1026 	}
1027 
1028 	return data_size;
1029 }
1030 
1031 
1032 /**
1033  * Memory buffers preparation (for both compression and decompression).
1034  *
1035  * Function allocates output buffers to perform compression
1036  * or decompression operations depending on value of op_type.
1037  *
1038  * @param op_type
1039  *   Operation type: compress or decompress
1040  * @param out_of_space_and_zlib
1041  *   Boolean value to switch into "out of space" buffer if set.
1042  *   To test "out-of-space" data size, zlib_decompress must be set as well.
1043  * @param test_priv_data
 *   A container used for aggregating all the private test arrays.
1045  * @param int_data
1046  *   Interim data containing session/transformation objects.
1047  * @param test_data
1048  *   The test parameters set by users (command line parameters).
 * @param current_extbuf_info
1050  *   The structure containing all the information related to external mbufs
1051  * @return
1052  *   - 0: On success.
1053  *   - -1: On error.
1054  */
1055 static int
1056 test_setup_output_bufs(
1057 		enum operation_type op_type,
1058 		unsigned int out_of_space_and_zlib,
1059 		const struct test_private_arrays *test_priv_data,
1060 		const struct interim_data_params *int_data,
1061 		const struct test_data_params *test_data,
1062 		struct rte_mbuf_ext_shared_info *current_extbuf_info)
1063 {
1064 	/* local variables: */
1065 	unsigned int i;
1066 	uint32_t data_size;
1067 	int ret;
1068 	char *buf_ptr;
1069 
1070 	/* from test_priv_data: */
1071 	struct rte_mbuf **current_bufs;
1072 
1073 	/* from int_data: */
1074 	unsigned int num_bufs = int_data->num_bufs;
1075 
1076 	/* from test_data: */
1077 	unsigned int buff_type = test_data->buff_type;
1078 	unsigned int big_data = test_data->big_data;
1079 	const struct rte_memzone *current_memzone;
1080 
1081 	struct comp_testsuite_params *ts_params = &testsuite_params;
1082 	struct rte_mempool *buf_pool;
1083 
1084 	if (big_data)
1085 		buf_pool = ts_params->big_mbuf_pool;
1086 	else if (buff_type == SGL_BOTH)
1087 		buf_pool = ts_params->small_mbuf_pool;
1088 	else
1089 		buf_pool = ts_params->large_mbuf_pool;
1090 
1091 	if (op_type == OPERATION_COMPRESSION)
1092 		current_bufs = test_priv_data->comp_bufs;
1093 	else
1094 		current_bufs = test_priv_data->uncomp_bufs;
1095 
1096 	/* the mbufs allocation*/
1097 	ret = rte_pktmbuf_alloc_bulk(buf_pool, current_bufs, num_bufs);
1098 	if (ret < 0) {
1099 		RTE_LOG(ERR, USER1,
1100 			"Destination mbufs could not be allocated "
1101 			"from the mempool\n");
1102 		return -1;
1103 	}
1104 
1105 	if (test_data->use_external_mbufs) {
1106 		current_extbuf_info->free_cb = extbuf_free_callback;
1107 		current_extbuf_info->fcb_opaque = NULL;
1108 		rte_mbuf_ext_refcnt_set(current_extbuf_info, 1);
1109 		if (op_type == OPERATION_COMPRESSION)
1110 			current_memzone = test_data->compbuf_memzone;
1111 		else
1112 			current_memzone = test_data->uncompbuf_memzone;
1113 
1114 		for (i = 0; i < num_bufs; i++) {
1115 			rte_pktmbuf_attach_extbuf(current_bufs[i],
1116 					current_memzone->addr,
1117 					current_memzone->iova,
1118 					current_memzone->len,
1119 					current_extbuf_info);
1120 			rte_pktmbuf_append(current_bufs[i],
1121 					current_memzone->len);
1122 		}
1123 	} else {
1124 		for (i = 0; i < num_bufs; i++) {
1125 
1126 			enum rte_comp_huffman comp_huffman =
1127 			ts_params->def_comp_xform->compress.deflate.huffman;
1128 
1129 			/* data size calculation */
1130 			data_size = test_mbufs_calculate_data_size(
1131 					op_type,
1132 					out_of_space_and_zlib,
1133 					test_priv_data,
1134 					int_data,
1135 					test_data,
1136 					i);
1137 
1138 			if (comp_huffman != RTE_COMP_HUFFMAN_DYNAMIC) {
1139 				if (op_type == OPERATION_DECOMPRESSION)
1140 					data_size *= COMPRESS_BUF_SIZE_RATIO;
1141 			}
1142 
1143 			/* data allocation */
1144 			if (buff_type == SGL_BOTH || buff_type == LB_TO_SGL) {
1145 				ret = prepare_sgl_bufs(NULL, current_bufs[i],
1146 				      data_size,
1147 				      big_data ? buf_pool :
1148 						ts_params->small_mbuf_pool,
1149 				      big_data ? buf_pool :
1150 						ts_params->large_mbuf_pool,
1151 				      big_data ? 0 : MAX_SEGS,
1152 				      big_data ? MAX_DATA_MBUF_SIZE :
1153 						 SMALL_SEG_SIZE);
1154 				if (ret < 0)
1155 					return -1;
1156 			} else {
1157 				buf_ptr = rte_pktmbuf_append(current_bufs[i],
1158 						data_size);
1159 				if (buf_ptr == NULL) {
1160 					RTE_LOG(ERR, USER1,
1161 						"Append extra bytes to the destination mbuf failed\n");
1162 					return -1;
1163 				}
1164 			}
1165 		}
1166 	}
1167 
1168 	return 0;
1169 }
1170 
1171 /**
1172  * The main compression function.
1173  *
1174  * Function performs compression operation.
1175  * Operation(s) configuration, depending on CLI parameters.
1176  * Operation(s) processing.
1177  *
1178  * @param int_data
1179  *   Interim data containing session/transformation objects.
1180  * @param test_data
1181  *   The test parameters set by users (command line parameters).
1182  * @param test_priv_data
 *   A container used for aggregating all the private test arrays.
1184  * @return
1185  *   - 0: On success.
1186  *   - -1: On error.
1187  */
static int
test_deflate_comp_run(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		const struct test_private_arrays *test_priv_data)
{
	/* local variables: */
	struct priv_op_data *priv_data;
	unsigned int i;
	/* counts created private xforms so all of them are freed on exit */
	uint16_t num_priv_xforms = 0;
	int ret;
	int ret_status = 0;
	char *buf_ptr;

	struct comp_testsuite_params *ts_params = &testsuite_params;

	/* from test_data: */
	enum rte_comp_op_type operation_type = test_data->compress_state;
	unsigned int zlib_compress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_COMPRESS);

	/* from int_data: */
	struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
	unsigned int num_xforms = int_data->num_xforms;
	unsigned int num_bufs = int_data->num_bufs;

	/* from test_priv_data: */
	struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
	struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
	struct rte_comp_op **ops = test_priv_data->ops;
	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
	void **priv_xforms = test_priv_data->priv_xforms;

	/*
	 * NOTE(review): assumes device 0 supports DEFLATE; capa is
	 * dereferenced below without a NULL check — confirm the test setup
	 * guarantees this.
	 */
	const struct rte_compressdev_capabilities *capa =
		rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);

	/* Build the compression operations */
	ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
	if (ret < 0) {
		RTE_LOG(ERR, USER1,
			"Compress operations could not be allocated "
			"from the mempool\n");
		ret_status = -1;
		goto exit;
	}

	/* configure one stateless compress op per source buffer */
	for (i = 0; i < num_bufs; i++) {
		ops[i]->m_src = uncomp_bufs[i];
		ops[i]->m_dst = comp_bufs[i];
		ops[i]->src.offset = 0;
		ops[i]->src.length = rte_pktmbuf_pkt_len(uncomp_bufs[i]);
		ops[i]->dst.offset = 0;

		RTE_LOG(DEBUG, USER1,
				"Uncompressed buffer length = %u compressed buffer length = %u",
				rte_pktmbuf_pkt_len(uncomp_bufs[i]),
				rte_pktmbuf_pkt_len(comp_bufs[i]));

		if (operation_type == RTE_COMP_OP_STATELESS) {
			ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
		} else {
			RTE_LOG(ERR, USER1,
				"Compression: stateful operations are not "
				"supported in these tests yet\n");
			ret_status = -1;
			goto exit;
		}
		ops[i]->input_chksum = 0;
		/*
		 * Store original operation index in private data,
		 * since ordering does not have to be maintained,
		 * when dequeueing from compressdev, so a comparison
		 * at the end of the test can be done.
		 */
		priv_data = (struct priv_op_data *) (ops[i] + 1);
		priv_data->orig_idx = i;
	}

	/* Compress data (either with the Zlib API or the compressdev API) */
	if (zlib_compress) {
		/* reference path: compress directly with zlib, no PMD */
		for (i = 0; i < num_bufs; i++) {
			const struct rte_comp_xform *compress_xform =
				compress_xforms[i % num_xforms];
			ret = compress_zlib(ops[i], compress_xform,
					DEFAULT_MEM_LEVEL);
			if (ret < 0) {
				ret_status = -1;
				goto exit;
			}

			ops_processed[i] = ops[i];
		}
	} else {
		/* Create compress private xform data */
		for (i = 0; i < num_xforms; i++) {
			ret = rte_compressdev_private_xform_create(0,
				(const struct rte_comp_xform *)
					compress_xforms[i],
				&priv_xforms[i]);
			if (ret < 0) {
				RTE_LOG(ERR, USER1,
					"Compression private xform "
					"could not be created\n");
				ret_status = -1;
				goto exit;
			}
			num_priv_xforms++;
		}
		if (capa->comp_feature_flags &
				RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
			/* Attach shareable private xform data to ops */
			for (i = 0; i < num_bufs; i++)
				ops[i]->private_xform =
						priv_xforms[i % num_xforms];
		} else {
		/* Create rest of the private xforms for the other ops */
			for (i = num_xforms; i < num_bufs; i++) {
				ret = rte_compressdev_private_xform_create(0,
					compress_xforms[i % num_xforms],
					&priv_xforms[i]);
				if (ret < 0) {
					RTE_LOG(ERR, USER1,
						"Compression private xform "
						"could not be created\n");
					ret_status = -1;
					goto exit;
				}
				num_priv_xforms++;
			}
			/* Attach non shareable private xform data to ops */
			for (i = 0; i < num_bufs; i++)
				ops[i]->private_xform = priv_xforms[i];
		}

		/*
		 * Out-of-space recovery loop: whenever an op returns
		 * OUT_OF_SPACE_RECOVERABLE, its destination mbuf is grown,
		 * the src/dst windows are advanced past the data already
		 * consumed/produced, and the whole batch is re-enqueued.
		 * The re-run is a no-op for ops already finished (their
		 * status is left untouched, only recoverable ops are reset
		 * to NOT_PROCESSED).
		 */
recovery_lb:
		ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
		if (ret < 0) {
			RTE_LOG(ERR, USER1,
				"Compression: enqueue/dequeue operation failed\n");
			ret_status = -1;
			goto exit;
		}

		for (i = 0; i < num_bufs; i++) {
			/* accumulate output size across recovery iterations */
			test_priv_data->compressed_data_size[i] +=
					ops_processed[i]->produced;

			if (ops_processed[i]->status ==
				RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE) {

				ops[i]->status =
					RTE_COMP_OP_STATUS_NOT_PROCESSED;
				ops[i]->src.offset +=
					ops_processed[i]->consumed;
				ops[i]->src.length -=
					ops_processed[i]->consumed;
				ops[i]->dst.offset +=
					ops_processed[i]->produced;

				/* grow dst by as much as was just produced */
				buf_ptr = rte_pktmbuf_append(
					ops[i]->m_dst,
					ops_processed[i]->produced);

				if (buf_ptr == NULL) {
					RTE_LOG(ERR, USER1,
						"Data recovery: append extra bytes to the current mbuf failed\n");
					ret_status = -1;
					goto exit;
				}
				goto recovery_lb;
			}
		}
	}

exit:
	/* Free resources */
	if (ret_status < 0)
		for (i = 0; i < num_bufs; i++) {
			rte_comp_op_free(ops[i]);
			ops[i] = NULL;
			ops_processed[i] = NULL;
		}

	/* Free compress private xforms */
	for (i = 0; i < num_priv_xforms; i++) {
		if (priv_xforms[i] != NULL) {
			rte_compressdev_private_xform_free(0, priv_xforms[i]);
			priv_xforms[i] = NULL;
		}
	}

	return ret_status;
}
1381 
1382 /**
1383  * Prints out the test report. Memory freeing.
1384  *
1385  * Called after successful compression.
 * Operation(s) status validation and decompression buffers freeing.
 *
 * -1 is returned if the function fails.
1389  *
1390  * @param int_data
1391  *   Interim data containing session/transformation objects.
1392  * @param test_data
1393  *   The test parameters set by users (command line parameters).
1394  * @param test_priv_data
 *   A container used for aggregating all the private test arrays.
1396  * @return
1397  *   - 2: Some operation is not supported
1398  *   - 1: Decompression should be skipped
1399  *   - 0: On success.
1400  *   - -1: On error.
1401  */
1402 static int
1403 test_deflate_comp_finalize(const struct interim_data_params *int_data,
1404 		const struct test_data_params *test_data,
1405 		const struct test_private_arrays *test_priv_data)
1406 {
1407 	/* local variables: */
1408 	unsigned int i;
1409 	struct priv_op_data *priv_data;
1410 
1411 	/* from int_data: */
1412 	unsigned int num_xforms = int_data->num_xforms;
1413 	struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1414 	uint16_t *buf_idx = int_data->buf_idx;
1415 	unsigned int num_bufs = int_data->num_bufs;
1416 
1417 	/* from test_priv_data: */
1418 	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1419 	uint64_t *compress_checksum = test_priv_data->compress_checksum;
1420 	struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
1421 	struct rte_comp_op **ops = test_priv_data->ops;
1422 
1423 	/* from test_data: */
1424 	unsigned int out_of_space = test_data->out_of_space;
1425 	unsigned int zlib_compress =
1426 			(test_data->zlib_dir == ZLIB_ALL ||
1427 			test_data->zlib_dir == ZLIB_COMPRESS);
1428 	unsigned int zlib_decompress =
1429 			(test_data->zlib_dir == ZLIB_ALL ||
1430 			test_data->zlib_dir == ZLIB_DECOMPRESS);
1431 
1432 	for (i = 0; i < num_bufs; i++) {
1433 		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1434 		uint16_t xform_idx = priv_data->orig_idx % num_xforms;
1435 		const struct rte_comp_compress_xform *compress_xform =
1436 				&compress_xforms[xform_idx]->compress;
1437 		enum rte_comp_huffman huffman_type =
1438 			compress_xform->deflate.huffman;
1439 		char engine[] = "zlib (directly, not PMD)";
1440 		if (zlib_decompress)
1441 			strlcpy(engine, "PMD", sizeof(engine));
1442 
1443 		RTE_LOG(DEBUG, USER1, "Buffer %u compressed by %s from %u to"
1444 			" %u bytes (level = %d, huffman = %s)\n",
1445 			buf_idx[priv_data->orig_idx], engine,
1446 			ops_processed[i]->consumed, ops_processed[i]->produced,
1447 			compress_xform->level,
1448 			huffman_type_strings[huffman_type]);
1449 		RTE_LOG(DEBUG, USER1, "Compression ratio = %.2f\n",
1450 			ops_processed[i]->consumed == 0 ? 0 :
1451 			(float)ops_processed[i]->produced /
1452 			ops_processed[i]->consumed * 100);
1453 		if (compress_xform->chksum != RTE_COMP_CHECKSUM_NONE)
1454 			compress_checksum[i] = ops_processed[i]->output_chksum;
1455 		ops[i] = NULL;
1456 	}
1457 
1458 	/*
1459 	 * Check operation status and free source mbufs (destination mbuf and
1460 	 * compress operation information is needed for the decompression stage)
1461 	 */
1462 	for (i = 0; i < num_bufs; i++) {
1463 		if (out_of_space && !zlib_compress) {
1464 			if (ops_processed[i]->status !=
1465 				RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1466 				RTE_LOG(ERR, USER1,
1467 					"Operation without expected out of "
1468 					"space status error\n");
1469 				return -1;
1470 			} else
1471 				continue;
1472 		}
1473 
1474 		if (ops_processed[i]->status != RTE_COMP_OP_STATUS_SUCCESS) {
1475 			if (test_data->overflow == OVERFLOW_ENABLED) {
1476 				if (ops_processed[i]->status ==
1477 				RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {
1478 					RTE_LOG(INFO, USER1,
1479 					"Out-of-space-recoverable functionality"
1480 					" is not supported on this device\n");
1481 					return 2;
1482 				}
1483 			}
1484 
1485 			RTE_LOG(ERR, USER1,
1486 				"Comp: Some operations were not successful\n");
1487 			return -1;
1488 		}
1489 		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1490 		rte_pktmbuf_free(uncomp_bufs[priv_data->orig_idx]);
1491 		uncomp_bufs[priv_data->orig_idx] = NULL;
1492 	}
1493 
1494 	if (out_of_space && !zlib_compress)
1495 		return 1;
1496 
1497 	return 0;
1498 }
1499 
1500 /**
1501  * The main decompression function.
1502  *
1503  * Function performs decompression operation.
1504  * Operation(s) configuration, depending on CLI parameters.
1505  * Operation(s) processing.
1506  *
1507  * @param int_data
1508  *   Interim data containing session/transformation objects.
1509  * @param test_data
1510  *   The test parameters set by users (command line parameters).
1511  * @param test_priv_data
 *   A container used for aggregating all the private test arrays.
1513  * @return
1514  *   - 0: On success.
1515  *   - -1: On error.
1516  */
static int
test_deflate_decomp_run(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		struct test_private_arrays *test_priv_data)
{

	/* local variables: */
	struct priv_op_data *priv_data;
	unsigned int i;
	/* counts created private xforms so the caller can free them later */
	uint16_t num_priv_xforms = 0;
	int ret;
	int ret_status = 0;

	struct comp_testsuite_params *ts_params = &testsuite_params;

	/* from test_data: */
	enum rte_comp_op_type operation_type = test_data->decompress_state;
	unsigned int zlib_decompress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_DECOMPRESS);

	/* from int_data: */
	struct rte_comp_xform **decompress_xforms = int_data->decompress_xforms;
	unsigned int num_xforms = int_data->num_xforms;
	unsigned int num_bufs = int_data->num_bufs;

	/* from test_priv_data: */
	struct rte_mbuf **uncomp_bufs = test_priv_data->uncomp_bufs;
	struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
	struct rte_comp_op **ops = test_priv_data->ops;
	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
	void **priv_xforms = test_priv_data->priv_xforms;
	uint32_t *compressed_data_size = test_priv_data->compressed_data_size;
	void **stream = test_priv_data->stream;

	/*
	 * NOTE(review): assumes device 0 supports DEFLATE; capa is
	 * dereferenced below without a NULL check — confirm the test setup
	 * guarantees this.
	 */
	const struct rte_compressdev_capabilities *capa =
		rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);

	ret = rte_comp_op_bulk_alloc(ts_params->op_pool, ops, num_bufs);
	if (ret < 0) {
		RTE_LOG(ERR, USER1,
			"Decompress operations could not be allocated "
			"from the mempool\n");
		ret_status = -1;
		goto exit;
	}

	/* Source buffer is the compressed data from the previous operations */
	for (i = 0; i < num_bufs; i++) {
		ops[i]->m_src = comp_bufs[i];
		ops[i]->m_dst = uncomp_bufs[i];
		ops[i]->src.offset = 0;
		/*
		 * Set the length of the compressed data to the
		 * number of bytes that were produced in the previous stage
		 */

		/*
		 * compressed_data_size[i] is the accumulated total across
		 * out-of-space recovery iterations; fall back to the last
		 * op's 'produced' when no accumulation happened
		 */
		if (compressed_data_size[i])
			ops[i]->src.length = compressed_data_size[i];
		else
			ops[i]->src.length = ops_processed[i]->produced;

		ops[i]->dst.offset = 0;

		if (operation_type == RTE_COMP_OP_STATELESS) {
			ops[i]->flush_flag = RTE_COMP_FLUSH_FINAL;
			ops[i]->op_type = RTE_COMP_OP_STATELESS;
		} else if (!zlib_decompress) {
			/* stateful decompression via PMD */
			ops[i]->flush_flag = RTE_COMP_FLUSH_SYNC;
			ops[i]->op_type = RTE_COMP_OP_STATEFUL;
		} else {
			RTE_LOG(ERR, USER1,
				"Decompression: stateful operations are"
				" not supported in these tests yet\n");
			ret_status = -1;
			goto exit;
		}
		ops[i]->input_chksum = 0;
		/*
		 * Copy private data from previous operations,
		 * to keep the pointer to the original buffer
		 */
		memcpy(ops[i] + 1, ops_processed[i] + 1,
				sizeof(struct priv_op_data));
	}

	/*
	 * Free the previous compress operations,
	 * as they are not needed anymore
	 */
	rte_comp_op_bulk_free(ops_processed, num_bufs);

	/* Decompress data (either with the Zlib API or the compressdev API) */
	if (zlib_decompress) {
		/* reference path: decompress directly with zlib, no PMD */
		for (i = 0; i < num_bufs; i++) {
			priv_data = (struct priv_op_data *)(ops[i] + 1);
			uint16_t xform_idx = priv_data->orig_idx % num_xforms;
			const struct rte_comp_xform *decompress_xform =
				decompress_xforms[xform_idx];

			ret = decompress_zlib(ops[i], decompress_xform);
			if (ret < 0) {
				ret_status = -1;
				goto exit;
			}

			ops_processed[i] = ops[i];
		}
	} else {
		if (operation_type == RTE_COMP_OP_STATELESS) {
			/* Create decompress private xform data */
			for (i = 0; i < num_xforms; i++) {
				ret = rte_compressdev_private_xform_create(0,
					(const struct rte_comp_xform *)
					decompress_xforms[i],
					&priv_xforms[i]);
				if (ret < 0) {
					RTE_LOG(ERR, USER1,
						"Decompression private xform "
						"could not be created\n");
					ret_status = -1;
					goto exit;
				}
				num_priv_xforms++;
			}

			if (capa->comp_feature_flags &
					RTE_COMP_FF_SHAREABLE_PRIV_XFORM) {
				/* Attach shareable private xform data to ops */
				for (i = 0; i < num_bufs; i++) {
					priv_data = (struct priv_op_data *)
							(ops[i] + 1);
					uint16_t xform_idx =
					       priv_data->orig_idx % num_xforms;
					ops[i]->private_xform =
							priv_xforms[xform_idx];
				}
			} else {
				/* Create rest of the private xforms */
				/* for the other ops */
				for (i = num_xforms; i < num_bufs; i++) {
					ret =
					 rte_compressdev_private_xform_create(0,
					      decompress_xforms[i % num_xforms],
					      &priv_xforms[i]);
					if (ret < 0) {
						RTE_LOG(ERR, USER1,
							"Decompression private xform"
							" could not be created\n");
						ret_status = -1;
						goto exit;
					}
					num_priv_xforms++;
				}

				/* Attach non shareable private xform data */
				/* to ops */
				for (i = 0; i < num_bufs; i++) {
					priv_data = (struct priv_op_data *)
							(ops[i] + 1);
					uint16_t xform_idx =
							priv_data->orig_idx;
					ops[i]->private_xform =
							priv_xforms[xform_idx];
				}
			}
		} else {
			/* Create a stream object for stateful decompression */
			ret = rte_compressdev_stream_create(0,
					decompress_xforms[0], stream);
			if (ret < 0) {
				RTE_LOG(ERR, USER1,
					"Decompression stream could not be created, error %d\n",
					ret);
				ret_status = -1;
				goto exit;
			}
			/* Attach stream to ops */
			for (i = 0; i < num_bufs; i++)
				ops[i]->stream = *stream;
		}

		/* hand the xform count back so the caller can free them */
		test_priv_data->num_priv_xforms = num_priv_xforms;
	}

exit:
	return ret_status;
}
1705 
1706 /**
1707  * Prints out the test report. Memory freeing.
1708  *
1709  * Called after successful decompression.
 * Operation(s) status validation and compression buffers freeing.
 *
 * -1 is returned if the function fails.
1713  *
1714  * @param int_data
1715  *   Interim data containing session/transformation objects.
1716  * @param test_data
1717  *   The test parameters set by users (command line parameters).
1718  * @param test_priv_data
 *   A container used for aggregating all the private test arrays.
1720  * @return
1721  *   - 2: Next step must be executed by the caller (stateful decompression only)
1722  *   - 1: On success (caller should stop and exit)
1723  *   - 0: On success.
1724  *   - -1: On error.
1725  */
static int
test_deflate_decomp_finalize(const struct interim_data_params *int_data,
		const struct test_data_params *test_data,
		const struct test_private_arrays *test_priv_data)
{
	/* local variables: */
	unsigned int i;
	struct priv_op_data *priv_data;
	/*
	 * counts stateful recovery iterations across repeated calls;
	 * NOTE(review): static, so it persists between test cases — confirm
	 * it is intentionally never reset.
	 */
	static unsigned int step;

	/* from int_data: */
	uint16_t *buf_idx = int_data->buf_idx;
	unsigned int num_bufs = int_data->num_bufs;
	const char * const *test_bufs = int_data->test_bufs;
	struct rte_comp_xform **compress_xforms = int_data->compress_xforms;

	/* from test_priv_data: */
	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
	struct rte_mbuf **comp_bufs = test_priv_data->comp_bufs;
	struct rte_comp_op **ops = test_priv_data->ops;
	uint64_t *compress_checksum = test_priv_data->compress_checksum;
	unsigned int *decomp_produced_data_size =
			test_priv_data->decomp_produced_data_size;
	char **all_decomp_data = test_priv_data->all_decomp_data;

	/* from test_data: */
	unsigned int out_of_space = test_data->out_of_space;
	enum rte_comp_op_type operation_type = test_data->decompress_state;

	unsigned int zlib_compress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_COMPRESS);
	unsigned int zlib_decompress =
			(test_data->zlib_dir == ZLIB_ALL ||
			test_data->zlib_dir == ZLIB_DECOMPRESS);

	/* report each decompression result */
	for (i = 0; i < num_bufs; i++) {
		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		char engine[] = "zlib, (directly, no PMD)";
		if (zlib_compress)
			strlcpy(engine, "pmd", sizeof(engine));
		RTE_LOG(DEBUG, USER1,
			"Buffer %u decompressed by %s from %u to %u bytes\n",
			buf_idx[priv_data->orig_idx], engine,
			ops_processed[i]->consumed, ops_processed[i]->produced);
		ops[i] = NULL;
	}

	/*
	 * Check operation status and free source mbuf (destination mbuf and
	 * compress operation information is still needed)
	 */
	for (i = 0; i < num_bufs; i++) {
		if (out_of_space && !zlib_decompress) {
			/* this mode expects every op to run out of space */
			if (ops_processed[i]->status !=
				RTE_COMP_OP_STATUS_OUT_OF_SPACE_TERMINATED) {

				RTE_LOG(ERR, USER1,
					"Operation without expected out of "
					"space status error\n");
				return -1;
			} else
				continue;
		}

		/*
		 * Stateful path: output arrives in chunks; each chunk is
		 * appended to all_decomp_data until the full src has been
		 * consumed, re-submitting (return 2) in between.
		 */
		if (operation_type == RTE_COMP_OP_STATEFUL
			&& (ops_processed[i]->status ==
				RTE_COMP_OP_STATUS_OUT_OF_SPACE_RECOVERABLE
			    || ops_processed[i]->status ==
				RTE_COMP_OP_STATUS_SUCCESS)) {

			RTE_LOG(DEBUG, USER1,
					".............RECOVERABLE\n");

			/* collect the output into all_decomp_data */
			const void *ptr = rte_pktmbuf_read(
					ops_processed[i]->m_dst,
					ops_processed[i]->dst.offset,
					ops_processed[i]->produced,
					*all_decomp_data +
						*decomp_produced_data_size);
			/* rte_pktmbuf_read only copies for segmented mbufs */
			if (ptr != *all_decomp_data +
					*decomp_produced_data_size)
				rte_memcpy(*all_decomp_data +
					   *decomp_produced_data_size,
					   ptr, ops_processed[i]->produced);

			*decomp_produced_data_size +=
					ops_processed[i]->produced;
			if (ops_processed[i]->src.length >
					ops_processed[i]->consumed) {
				/* input not fully consumed yet */
				if (ops_processed[i]->status ==
						RTE_COMP_OP_STATUS_SUCCESS) {
					RTE_LOG(ERR, USER1,
					      "Operation finished too early\n");
					return -1;
				}
				step++;
				if (step >= test_data->decompress_steps_max) {
					RTE_LOG(ERR, USER1,
					  "Operation exceeded maximum steps\n");
					return -1;
				}
				/* advance past the consumed input and retry */
				ops[i] = ops_processed[i];
				ops[i]->status =
					       RTE_COMP_OP_STATUS_NOT_PROCESSED;
				ops[i]->src.offset +=
						ops_processed[i]->consumed;
				ops[i]->src.length -=
						ops_processed[i]->consumed;
				/* repeat the operation */
				return 2;
			} else {
				/* Compare the original stream with the */
				/* decompressed stream (in size and the data) */
				priv_data = (struct priv_op_data *)
						(ops_processed[i] + 1);
				const char *buf1 =
						test_bufs[priv_data->orig_idx];
				const char *buf2 = *all_decomp_data;

				if (compare_buffers(buf1, strlen(buf1) + 1,
					  buf2, *decomp_produced_data_size) < 0)
					return -1;
				/* Test checksums */
				if (compress_xforms[0]->compress.chksum
						!= RTE_COMP_CHECKSUM_NONE) {
					if (ops_processed[i]->output_chksum
						      != compress_checksum[i]) {
						RTE_LOG(ERR, USER1,
			"The checksums differ\n"
			"Compression Checksum: %" PRIu64 "\tDecompression "
			"Checksum: %" PRIu64 "\n", compress_checksum[i],
					       ops_processed[i]->output_chksum);
						return -1;
					}
				}
			}
		} else if (ops_processed[i]->status !=
			   RTE_COMP_OP_STATUS_SUCCESS) {
			RTE_LOG(ERR, USER1,
					"Decomp: Some operations were not successful, status = %u\n",
					ops_processed[i]->status);
			return -1;
		}
		/* compressed source is no longer needed */
		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
		rte_pktmbuf_free(comp_bufs[priv_data->orig_idx]);
		comp_bufs[priv_data->orig_idx] = NULL;
	}

	/* out-of-space runs stop here, there is nothing left to validate */
	if (out_of_space && !zlib_decompress)
		return 1;

	return 0;
}
1881 
1882 /**
1883  * Validation of the output (compression/decompression) data.
1884  *
1885  * The function compares the source stream with the output stream,
1886  * after decompression, to check if compression/decompression
1887  * was correct.
1888  * -1 returned if function fail.
1889  *
1890  * @param int_data
1891  *   Interim data containing session/transformation objects.
1892  * @param test_data
1893  *   The test parameters set by users (command line parameters).
1894  * @param test_priv_data
 *   A container used for aggregating all the private test arrays.
1896  * @return
1897  *   - 0: On success.
1898  *   - -1: On error.
1899  */
1900 static int
1901 test_results_validation(const struct interim_data_params *int_data,
1902 		const struct test_data_params *test_data,
1903 		const struct test_private_arrays *test_priv_data)
1904 {
1905 	/* local variables: */
1906 	unsigned int i;
1907 	struct priv_op_data *priv_data;
1908 	const char *buf1;
1909 	const char *buf2;
1910 	char *contig_buf = NULL;
1911 	uint32_t data_size;
1912 
1913 	/* from int_data: */
1914 	struct rte_comp_xform **compress_xforms = int_data->compress_xforms;
1915 	unsigned int num_bufs = int_data->num_bufs;
1916 	const char * const *test_bufs = int_data->test_bufs;
1917 
1918 	/* from test_priv_data: */
1919 	uint64_t *compress_checksum = test_priv_data->compress_checksum;
1920 	struct rte_comp_op **ops_processed = test_priv_data->ops_processed;
1921 
1922 	/*
1923 	 * Compare the original stream with the decompressed stream
1924 	 * (in size and the data)
1925 	 */
1926 	for (i = 0; i < num_bufs; i++) {
1927 		priv_data = (struct priv_op_data *)(ops_processed[i] + 1);
1928 		buf1 = test_data->use_external_mbufs ?
1929 				test_data->inbuf_memzone->addr :
1930 				test_bufs[priv_data->orig_idx];
1931 		data_size = test_data->use_external_mbufs ?
1932 				test_data->inbuf_data_size :
1933 				strlen(buf1) + 1;
1934 
1935 		contig_buf = rte_malloc(NULL, ops_processed[i]->produced, 0);
1936 		if (contig_buf == NULL) {
1937 			RTE_LOG(ERR, USER1, "Contiguous buffer could not "
1938 					"be allocated\n");
1939 			goto exit;
1940 		}
1941 
1942 		buf2 = rte_pktmbuf_read(ops_processed[i]->m_dst, 0,
1943 				ops_processed[i]->produced, contig_buf);
1944 		if (compare_buffers(buf1, data_size,
1945 				buf2, ops_processed[i]->produced) < 0)
1946 			goto exit;
1947 
1948 		/* Test checksums */
1949 		if (compress_xforms[0]->compress.chksum !=
1950 				RTE_COMP_CHECKSUM_NONE) {
1951 			if (ops_processed[i]->output_chksum !=
1952 					compress_checksum[i]) {
1953 				RTE_LOG(ERR, USER1, "The checksums differ\n"
1954 			"Compression Checksum: %" PRIu64 "\tDecompression "
1955 			"Checksum: %" PRIu64 "\n", compress_checksum[i],
1956 			ops_processed[i]->output_chksum);
1957 				goto exit;
1958 			}
1959 		}
1960 
1961 		rte_free(contig_buf);
1962 		contig_buf = NULL;
1963 	}
1964 	return 0;
1965 
1966 exit:
1967 	rte_free(contig_buf);
1968 	return -1;
1969 }
1970 
1971 /**
1972  * Compresses and decompresses input stream with compressdev API and Zlib API
1973  *
1974  * Basic test function. Common for all the functional tests.
1975  * -1 returned if function fail.
1976  *
1977  * @param int_data
1978  *   Interim data containing session/transformation objects.
1979  * @param test_data
1980  *   The test parameters set by users (command line parameters).
1981  * @return
1982  *   - 1: Some operation not supported
1983  *   - 0: On success.
1984  *   - -1: On error.
1985  */
1986 
1987 static int
1988 test_deflate_comp_decomp(const struct interim_data_params *int_data,
1989 		const struct test_data_params *test_data)
1990 {
1991 	unsigned int num_bufs = int_data->num_bufs;
1992 	unsigned int out_of_space = test_data->out_of_space;
1993 
1994 	void *stream = NULL;
1995 	char *all_decomp_data = NULL;
1996 	unsigned int decomp_produced_data_size = 0;
1997 
1998 	int ret_status = -1;
1999 	int ret;
2000 	struct rte_mbuf *uncomp_bufs[num_bufs];
2001 	struct rte_mbuf *comp_bufs[num_bufs];
2002 	struct rte_comp_op *ops[num_bufs];
2003 	struct rte_comp_op *ops_processed[num_bufs];
2004 	void *priv_xforms[num_bufs];
2005 	unsigned int i;
2006 
2007 	uint64_t compress_checksum[num_bufs];
2008 	uint32_t compressed_data_size[num_bufs];
2009 	char *contig_buf = NULL;
2010 
2011 	struct rte_mbuf_ext_shared_info compbuf_info;
2012 	struct rte_mbuf_ext_shared_info decompbuf_info;
2013 
2014 	const struct rte_compressdev_capabilities *capa;
2015 
2016 	/* Compressing with CompressDev */
2017 	unsigned int zlib_compress =
2018 			(test_data->zlib_dir == ZLIB_ALL ||
2019 			test_data->zlib_dir == ZLIB_COMPRESS);
2020 	unsigned int zlib_decompress =
2021 			(test_data->zlib_dir == ZLIB_ALL ||
2022 			test_data->zlib_dir == ZLIB_DECOMPRESS);
2023 
2024 	struct test_private_arrays test_priv_data;
2025 
2026 	test_priv_data.uncomp_bufs = uncomp_bufs;
2027 	test_priv_data.comp_bufs = comp_bufs;
2028 	test_priv_data.ops = ops;
2029 	test_priv_data.ops_processed = ops_processed;
2030 	test_priv_data.priv_xforms = priv_xforms;
2031 	test_priv_data.compress_checksum = compress_checksum;
2032 	test_priv_data.compressed_data_size = compressed_data_size;
2033 
2034 	test_priv_data.stream = &stream;
2035 	test_priv_data.all_decomp_data = &all_decomp_data;
2036 	test_priv_data.decomp_produced_data_size = &decomp_produced_data_size;
2037 
2038 	test_priv_data.num_priv_xforms = 0; /* it's used for deompression only */
2039 
2040 	capa = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2041 	if (capa == NULL) {
2042 		RTE_LOG(ERR, USER1,
2043 			"Compress device does not support DEFLATE\n");
2044 		return -1;
2045 	}
2046 
2047 	/* Prepare the source mbufs with the data */
2048 	ret = test_setup_com_bufs(int_data, test_data, &test_priv_data);
2049 	if (ret < 0) {
2050 		ret_status = -1;
2051 		goto exit;
2052 	}
2053 
2054 	RTE_LOG(DEBUG, USER1, "<<< COMPRESSION >>>\n");
2055 
2056 /* COMPRESSION  */
2057 
2058 	/* Prepare output (destination) mbufs for compressed data */
2059 	ret = test_setup_output_bufs(
2060 			OPERATION_COMPRESSION,
2061 			out_of_space == 1 && !zlib_compress,
2062 			&test_priv_data,
2063 			int_data,
2064 			test_data,
2065 			&compbuf_info);
2066 	if (ret < 0) {
2067 		ret_status = -1;
2068 		goto exit;
2069 	}
2070 
2071 	/* Run compression */
2072 	ret = test_deflate_comp_run(int_data, test_data, &test_priv_data);
2073 	if (ret < 0) {
2074 		ret_status = -1;
2075 		goto exit;
2076 	}
2077 
2078 	ret = test_deflate_comp_finalize(int_data, test_data, &test_priv_data);
2079 	if (ret < 0) {
2080 		ret_status = -1;
2081 		goto exit;
2082 	} else if (ret == 1) {
2083 		ret_status = 0;
2084 		goto exit;
2085 	} else if (ret == 2) {
2086 		ret_status = 1;	 /* some operation not supported */
2087 		goto exit;
2088 	}
2089 
2090 /* DECOMPRESSION  */
2091 
2092 	RTE_LOG(DEBUG, USER1, "<<< DECOMPRESSION >>>\n");
2093 
2094 	/* Prepare output (destination) mbufs for decompressed data */
2095 	ret = test_setup_output_bufs(
2096 			OPERATION_DECOMPRESSION,
2097 			out_of_space == 1 && !zlib_decompress,
2098 			&test_priv_data,
2099 			int_data,
2100 			test_data,
2101 			&decompbuf_info);
2102 	if (ret < 0) {
2103 		ret_status = -1;
2104 		goto exit;
2105 	}
2106 
2107 	/* Run decompression */
2108 	ret = test_deflate_decomp_run(int_data, test_data, &test_priv_data);
2109 	if (ret < 0) {
2110 		ret_status = -1;
2111 		goto exit;
2112 	}
2113 
2114 	if (!zlib_decompress) {
2115 next_step:	/* next step for stateful decompression only */
2116 		ret = test_run_enqueue_dequeue(ops, ops_processed, num_bufs);
2117 		if (ret < 0) {
2118 			ret_status = -1;
2119 			RTE_LOG(ERR, USER1,
2120 				"Decompression: enqueue/dequeue operation failed\n");
2121 		}
2122 	}
2123 
2124 	ret = test_deflate_decomp_finalize(int_data, test_data, &test_priv_data);
2125 	if (ret < 0) {
2126 		ret_status = -1;
2127 		goto exit;
2128 	} else if (ret == 1) {
2129 		ret_status = 0;
2130 		goto exit;
2131 	} else if (ret == 2) {
2132 		goto next_step;
2133 	}
2134 
2135 /* FINAL PROCESSING  */
2136 
2137 	ret = test_results_validation(int_data, test_data, &test_priv_data);
2138 	if (ret < 0) {
2139 		ret_status = -1;
2140 		goto exit;
2141 	}
2142 	ret_status = 0;
2143 
2144 exit:
2145 	/* Free resources */
2146 
2147 	if (stream != NULL)
2148 		rte_compressdev_stream_free(0, stream);
2149 	if (all_decomp_data != NULL)
2150 		rte_free(all_decomp_data);
2151 
2152 	/* Free compress private xforms */
2153 	for (i = 0; i < test_priv_data.num_priv_xforms; i++) {
2154 		if (priv_xforms[i] != NULL) {
2155 			rte_compressdev_private_xform_free(0, priv_xforms[i]);
2156 			priv_xforms[i] = NULL;
2157 		}
2158 	}
2159 	for (i = 0; i < num_bufs; i++) {
2160 		rte_pktmbuf_free(uncomp_bufs[i]);
2161 		rte_pktmbuf_free(comp_bufs[i]);
2162 		rte_comp_op_free(ops[i]);
2163 		rte_comp_op_free(ops_processed[i]);
2164 	}
2165 	rte_free(contig_buf);
2166 
2167 	return ret_status;
2168 }
2169 
2170 static int
2171 test_compressdev_deflate_stateless_fixed(void)
2172 {
2173 	struct comp_testsuite_params *ts_params = &testsuite_params;
2174 	uint16_t i;
2175 	int ret;
2176 	const struct rte_compressdev_capabilities *capab;
2177 
2178 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2179 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2180 
2181 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2182 		return -ENOTSUP;
2183 
2184 	struct rte_comp_xform *compress_xform =
2185 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2186 
2187 	if (compress_xform == NULL) {
2188 		RTE_LOG(ERR, USER1,
2189 			"Compress xform could not be created\n");
2190 		ret = TEST_FAILED;
2191 		goto exit;
2192 	}
2193 
2194 	memcpy(compress_xform, ts_params->def_comp_xform,
2195 			sizeof(struct rte_comp_xform));
2196 	compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
2197 
2198 	struct interim_data_params int_data = {
2199 		NULL,
2200 		1,
2201 		NULL,
2202 		&compress_xform,
2203 		&ts_params->def_decomp_xform,
2204 		1
2205 	};
2206 
2207 	struct test_data_params test_data = {
2208 		.compress_state = RTE_COMP_OP_STATELESS,
2209 		.decompress_state = RTE_COMP_OP_STATELESS,
2210 		.buff_type = LB_BOTH,
2211 		.zlib_dir = ZLIB_DECOMPRESS,
2212 		.out_of_space = 0,
2213 		.big_data = 0,
2214 		.overflow = OVERFLOW_DISABLED,
2215 		.ratio = RATIO_ENABLED
2216 	};
2217 
2218 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2219 		int_data.test_bufs = &compress_test_bufs[i];
2220 		int_data.buf_idx = &i;
2221 
2222 		/* Compress with compressdev, decompress with Zlib */
2223 		test_data.zlib_dir = ZLIB_DECOMPRESS;
2224 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2225 		if (ret < 0)
2226 			goto exit;
2227 
2228 		/* Compress with Zlib, decompress with compressdev */
2229 		test_data.zlib_dir = ZLIB_COMPRESS;
2230 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2231 		if (ret < 0)
2232 			goto exit;
2233 	}
2234 
2235 	ret = TEST_SUCCESS;
2236 
2237 exit:
2238 	rte_free(compress_xform);
2239 	return ret;
2240 }
2241 
2242 static int
2243 test_compressdev_deflate_stateless_dynamic(void)
2244 {
2245 	struct comp_testsuite_params *ts_params = &testsuite_params;
2246 	uint16_t i;
2247 	int ret;
2248 	struct rte_comp_xform *compress_xform =
2249 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2250 
2251 	const struct rte_compressdev_capabilities *capab;
2252 
2253 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2254 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2255 
2256 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2257 		return -ENOTSUP;
2258 
2259 	if (compress_xform == NULL) {
2260 		RTE_LOG(ERR, USER1,
2261 			"Compress xform could not be created\n");
2262 		ret = TEST_FAILED;
2263 		goto exit;
2264 	}
2265 
2266 	memcpy(compress_xform, ts_params->def_comp_xform,
2267 			sizeof(struct rte_comp_xform));
2268 	compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_DYNAMIC;
2269 
2270 	struct interim_data_params int_data = {
2271 		NULL,
2272 		1,
2273 		NULL,
2274 		&compress_xform,
2275 		&ts_params->def_decomp_xform,
2276 		1
2277 	};
2278 
2279 	struct test_data_params test_data = {
2280 		.compress_state = RTE_COMP_OP_STATELESS,
2281 		.decompress_state = RTE_COMP_OP_STATELESS,
2282 		.buff_type = LB_BOTH,
2283 		.zlib_dir = ZLIB_DECOMPRESS,
2284 		.out_of_space = 0,
2285 		.big_data = 0,
2286 		.overflow = OVERFLOW_DISABLED,
2287 		.ratio = RATIO_ENABLED
2288 	};
2289 
2290 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2291 		int_data.test_bufs = &compress_test_bufs[i];
2292 		int_data.buf_idx = &i;
2293 
2294 		/* Compress with compressdev, decompress with Zlib */
2295 		test_data.zlib_dir = ZLIB_DECOMPRESS;
2296 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2297 		if (ret < 0)
2298 			goto exit;
2299 
2300 		/* Compress with Zlib, decompress with compressdev */
2301 		test_data.zlib_dir = ZLIB_COMPRESS;
2302 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2303 		if (ret < 0)
2304 			goto exit;
2305 	}
2306 
2307 	ret = TEST_SUCCESS;
2308 
2309 exit:
2310 	rte_free(compress_xform);
2311 	return ret;
2312 }
2313 
2314 static int
2315 test_compressdev_deflate_stateless_multi_op(void)
2316 {
2317 	struct comp_testsuite_params *ts_params = &testsuite_params;
2318 	uint16_t num_bufs = RTE_DIM(compress_test_bufs);
2319 	uint16_t buf_idx[num_bufs];
2320 	uint16_t i;
2321 	int ret;
2322 
2323 	for (i = 0; i < num_bufs; i++)
2324 		buf_idx[i] = i;
2325 
2326 	struct interim_data_params int_data = {
2327 		compress_test_bufs,
2328 		num_bufs,
2329 		buf_idx,
2330 		&ts_params->def_comp_xform,
2331 		&ts_params->def_decomp_xform,
2332 		1
2333 	};
2334 
2335 	struct test_data_params test_data = {
2336 		.compress_state = RTE_COMP_OP_STATELESS,
2337 		.decompress_state = RTE_COMP_OP_STATELESS,
2338 		.buff_type = LB_BOTH,
2339 		.zlib_dir = ZLIB_DECOMPRESS,
2340 		.out_of_space = 0,
2341 		.big_data = 0,
2342 		.overflow = OVERFLOW_DISABLED,
2343 		.ratio = RATIO_ENABLED
2344 	};
2345 
2346 	/* Compress with compressdev, decompress with Zlib */
2347 	test_data.zlib_dir = ZLIB_DECOMPRESS;
2348 	ret = test_deflate_comp_decomp(&int_data, &test_data);
2349 	if (ret < 0)
2350 		return ret;
2351 
2352 	/* Compress with Zlib, decompress with compressdev */
2353 	test_data.zlib_dir = ZLIB_COMPRESS;
2354 	ret = test_deflate_comp_decomp(&int_data, &test_data);
2355 	if (ret < 0)
2356 		return ret;
2357 
2358 	return TEST_SUCCESS;
2359 }
2360 
2361 static int
2362 test_compressdev_deflate_stateless_multi_level(void)
2363 {
2364 	struct comp_testsuite_params *ts_params = &testsuite_params;
2365 	unsigned int level;
2366 	uint16_t i;
2367 	int ret;
2368 	struct rte_comp_xform *compress_xform =
2369 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2370 
2371 	if (compress_xform == NULL) {
2372 		RTE_LOG(ERR, USER1,
2373 			"Compress xform could not be created\n");
2374 		ret = TEST_FAILED;
2375 		goto exit;
2376 	}
2377 
2378 	memcpy(compress_xform, ts_params->def_comp_xform,
2379 			sizeof(struct rte_comp_xform));
2380 
2381 	struct interim_data_params int_data = {
2382 		NULL,
2383 		1,
2384 		NULL,
2385 		&compress_xform,
2386 		&ts_params->def_decomp_xform,
2387 		1
2388 	};
2389 
2390 	struct test_data_params test_data = {
2391 		.compress_state = RTE_COMP_OP_STATELESS,
2392 		.decompress_state = RTE_COMP_OP_STATELESS,
2393 		.buff_type = LB_BOTH,
2394 		.zlib_dir = ZLIB_DECOMPRESS,
2395 		.out_of_space = 0,
2396 		.big_data = 0,
2397 		.overflow = OVERFLOW_DISABLED,
2398 		.ratio = RATIO_ENABLED
2399 	};
2400 
2401 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2402 		int_data.test_bufs = &compress_test_bufs[i];
2403 		int_data.buf_idx = &i;
2404 
2405 		for (level = RTE_COMP_LEVEL_MIN; level <= RTE_COMP_LEVEL_MAX;
2406 				level++) {
2407 			compress_xform->compress.level = level;
2408 			/* Compress with compressdev, decompress with Zlib */
2409 			test_data.zlib_dir = ZLIB_DECOMPRESS;
2410 			ret = test_deflate_comp_decomp(&int_data, &test_data);
2411 			if (ret < 0)
2412 				goto exit;
2413 		}
2414 	}
2415 
2416 	ret = TEST_SUCCESS;
2417 
2418 exit:
2419 	rte_free(compress_xform);
2420 	return ret;
2421 }
2422 
2423 #define NUM_XFORMS 3
2424 static int
2425 test_compressdev_deflate_stateless_multi_xform(void)
2426 {
2427 	struct comp_testsuite_params *ts_params = &testsuite_params;
2428 	uint16_t num_bufs = NUM_XFORMS;
2429 	struct rte_comp_xform *compress_xforms[NUM_XFORMS] = {NULL};
2430 	struct rte_comp_xform *decompress_xforms[NUM_XFORMS] = {NULL};
2431 	const char *test_buffers[NUM_XFORMS];
2432 	uint16_t i;
2433 	unsigned int level = RTE_COMP_LEVEL_MIN;
2434 	uint16_t buf_idx[num_bufs];
2435 	int ret;
2436 
2437 	/* Create multiple xforms with various levels */
2438 	for (i = 0; i < NUM_XFORMS; i++) {
2439 		compress_xforms[i] = rte_malloc(NULL,
2440 				sizeof(struct rte_comp_xform), 0);
2441 		if (compress_xforms[i] == NULL) {
2442 			RTE_LOG(ERR, USER1,
2443 				"Compress xform could not be created\n");
2444 			ret = TEST_FAILED;
2445 			goto exit;
2446 		}
2447 
2448 		memcpy(compress_xforms[i], ts_params->def_comp_xform,
2449 				sizeof(struct rte_comp_xform));
2450 		compress_xforms[i]->compress.level = level;
2451 		level++;
2452 
2453 		decompress_xforms[i] = rte_malloc(NULL,
2454 				sizeof(struct rte_comp_xform), 0);
2455 		if (decompress_xforms[i] == NULL) {
2456 			RTE_LOG(ERR, USER1,
2457 				"Decompress xform could not be created\n");
2458 			ret = TEST_FAILED;
2459 			goto exit;
2460 		}
2461 
2462 		memcpy(decompress_xforms[i], ts_params->def_decomp_xform,
2463 				sizeof(struct rte_comp_xform));
2464 	}
2465 
2466 	for (i = 0; i < NUM_XFORMS; i++) {
2467 		buf_idx[i] = 0;
2468 		/* Use the same buffer in all sessions */
2469 		test_buffers[i] = compress_test_bufs[0];
2470 	}
2471 
2472 	struct interim_data_params int_data = {
2473 		test_buffers,
2474 		num_bufs,
2475 		buf_idx,
2476 		compress_xforms,
2477 		decompress_xforms,
2478 		NUM_XFORMS
2479 	};
2480 
2481 	struct test_data_params test_data = {
2482 		.compress_state = RTE_COMP_OP_STATELESS,
2483 		.decompress_state = RTE_COMP_OP_STATELESS,
2484 		.buff_type = LB_BOTH,
2485 		.zlib_dir = ZLIB_DECOMPRESS,
2486 		.out_of_space = 0,
2487 		.big_data = 0,
2488 		.overflow = OVERFLOW_DISABLED,
2489 		.ratio = RATIO_ENABLED
2490 	};
2491 
2492 	/* Compress with compressdev, decompress with Zlib */
2493 	ret = test_deflate_comp_decomp(&int_data, &test_data);
2494 	if (ret < 0)
2495 		goto exit;
2496 
2497 	ret = TEST_SUCCESS;
2498 
2499 exit:
2500 	for (i = 0; i < NUM_XFORMS; i++) {
2501 		rte_free(compress_xforms[i]);
2502 		rte_free(decompress_xforms[i]);
2503 	}
2504 
2505 	return ret;
2506 }
2507 
2508 static int
2509 test_compressdev_deflate_stateless_sgl(void)
2510 {
2511 	struct comp_testsuite_params *ts_params = &testsuite_params;
2512 	uint16_t i;
2513 	int ret;
2514 	const struct rte_compressdev_capabilities *capab;
2515 
2516 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2517 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2518 
2519 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2520 		return -ENOTSUP;
2521 
2522 	struct interim_data_params int_data = {
2523 		NULL,
2524 		1,
2525 		NULL,
2526 		&ts_params->def_comp_xform,
2527 		&ts_params->def_decomp_xform,
2528 		1
2529 	};
2530 
2531 	struct test_data_params test_data = {
2532 		.compress_state = RTE_COMP_OP_STATELESS,
2533 		.decompress_state = RTE_COMP_OP_STATELESS,
2534 		.buff_type = SGL_BOTH,
2535 		.zlib_dir = ZLIB_DECOMPRESS,
2536 		.out_of_space = 0,
2537 		.big_data = 0,
2538 		.overflow = OVERFLOW_DISABLED,
2539 		.ratio = RATIO_ENABLED
2540 	};
2541 
2542 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
2543 		int_data.test_bufs = &compress_test_bufs[i];
2544 		int_data.buf_idx = &i;
2545 
2546 		/* Compress with compressdev, decompress with Zlib */
2547 		test_data.zlib_dir = ZLIB_DECOMPRESS;
2548 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2549 		if (ret < 0)
2550 			return ret;
2551 
2552 		/* Compress with Zlib, decompress with compressdev */
2553 		test_data.zlib_dir = ZLIB_COMPRESS;
2554 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2555 		if (ret < 0)
2556 			return ret;
2557 
2558 		if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_LB_OUT) {
2559 			/* Compress with compressdev, decompress with Zlib */
2560 			test_data.zlib_dir = ZLIB_DECOMPRESS;
2561 			test_data.buff_type = SGL_TO_LB;
2562 			ret = test_deflate_comp_decomp(&int_data, &test_data);
2563 			if (ret < 0)
2564 				return ret;
2565 
2566 			/* Compress with Zlib, decompress with compressdev */
2567 			test_data.zlib_dir = ZLIB_COMPRESS;
2568 			test_data.buff_type = SGL_TO_LB;
2569 			ret = test_deflate_comp_decomp(&int_data, &test_data);
2570 			if (ret < 0)
2571 				return ret;
2572 		}
2573 
2574 		if (capab->comp_feature_flags & RTE_COMP_FF_OOP_LB_IN_SGL_OUT) {
2575 			/* Compress with compressdev, decompress with Zlib */
2576 			test_data.zlib_dir = ZLIB_DECOMPRESS;
2577 			test_data.buff_type = LB_TO_SGL;
2578 			ret = test_deflate_comp_decomp(&int_data, &test_data);
2579 			if (ret < 0)
2580 				return ret;
2581 
2582 			/* Compress with Zlib, decompress with compressdev */
2583 			test_data.zlib_dir = ZLIB_COMPRESS;
2584 			test_data.buff_type = LB_TO_SGL;
2585 			ret = test_deflate_comp_decomp(&int_data, &test_data);
2586 			if (ret < 0)
2587 				return ret;
2588 		}
2589 	}
2590 
2591 	return TEST_SUCCESS;
2592 }
2593 
static int
test_compressdev_deflate_stateless_checksum(void)
{
	struct comp_testsuite_params *ts_params = &testsuite_params;
	uint16_t i;
	int ret;
	const struct rte_compressdev_capabilities *capab;

	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");

	/* Check if driver supports any checksum */
	if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) == 0 &&
			(capab->comp_feature_flags &
			RTE_COMP_FF_ADLER32_CHECKSUM) == 0 &&
			(capab->comp_feature_flags &
			RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) == 0)
		return -ENOTSUP;

	/* Private copies of the default xforms, so the chksum field can
	 * be switched per sub-test without touching the suite defaults.
	 */
	struct rte_comp_xform *compress_xform =
			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
	if (compress_xform == NULL) {
		RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
		return TEST_FAILED;
	}

	memcpy(compress_xform, ts_params->def_comp_xform,
			sizeof(struct rte_comp_xform));

	struct rte_comp_xform *decompress_xform =
			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
	if (decompress_xform == NULL) {
		RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
		rte_free(compress_xform);
		return TEST_FAILED;
	}

	memcpy(decompress_xform, ts_params->def_decomp_xform,
			sizeof(struct rte_comp_xform));

	struct interim_data_params int_data = {
		NULL,
		1,
		NULL,
		&compress_xform,
		&decompress_xform,
		1
	};

	struct test_data_params test_data = {
		.compress_state = RTE_COMP_OP_STATELESS,
		.decompress_state = RTE_COMP_OP_STATELESS,
		.buff_type = LB_BOTH,
		.zlib_dir = ZLIB_DECOMPRESS,
		.out_of_space = 0,
		.big_data = 0,
		.overflow = OVERFLOW_DISABLED,
		.ratio = RATIO_ENABLED
	};

	/* Check if driver supports crc32 checksum and test */
	if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM)) {
		compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
		decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;

		for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
			/* Compress with compressdev, decompress with Zlib */
			int_data.test_bufs = &compress_test_bufs[i];
			int_data.buf_idx = &i;

			/* Generate zlib checksum and test against selected
			 * drivers decompression checksum
			 */
			test_data.zlib_dir = ZLIB_COMPRESS;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;

			/* Generate compression and decompression
			 * checksum of selected driver
			 */
			test_data.zlib_dir = ZLIB_NONE;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;
		}
	}

	/* Check if driver supports adler32 checksum and test */
	if ((capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM)) {
		compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
		decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;

		for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
			int_data.test_bufs = &compress_test_bufs[i];
			int_data.buf_idx = &i;

			/* Generate zlib checksum and test against selected
			 * drivers decompression checksum
			 */
			test_data.zlib_dir = ZLIB_COMPRESS;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;
			/* Generate compression and decompression
			 * checksum of selected driver
			 */
			test_data.zlib_dir = ZLIB_NONE;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;
		}
	}

	/* Check if driver supports combined crc and adler checksum and test */
	if ((capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)) {
		compress_xform->compress.chksum =
				RTE_COMP_CHECKSUM_CRC32_ADLER32;
		decompress_xform->decompress.chksum =
				RTE_COMP_CHECKSUM_CRC32_ADLER32;

		/* Combined checksum runs driver-only (ZLIB_NONE): no Zlib
		 * cross-check here, unlike the crc32/adler32 cases above.
		 */
		for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
			int_data.test_bufs = &compress_test_bufs[i];
			int_data.buf_idx = &i;

			/* Generate compression and decompression
			 * checksum of selected driver
			 */
			test_data.zlib_dir = ZLIB_NONE;
			ret = test_deflate_comp_decomp(&int_data, &test_data);
			if (ret < 0)
				goto exit;
		}
	}

	ret = TEST_SUCCESS;

exit:
	rte_free(compress_xform);
	rte_free(decompress_xform);
	return ret;
}
2736 
2737 static int
2738 test_compressdev_out_of_space_buffer(void)
2739 {
2740 	struct comp_testsuite_params *ts_params = &testsuite_params;
2741 	int ret;
2742 	uint16_t i;
2743 	const struct rte_compressdev_capabilities *capab;
2744 
2745 	RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
2746 
2747 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2748 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2749 
2750 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
2751 		return -ENOTSUP;
2752 
2753 	struct interim_data_params int_data = {
2754 		&compress_test_bufs[0],
2755 		1,
2756 		&i,
2757 		&ts_params->def_comp_xform,
2758 		&ts_params->def_decomp_xform,
2759 		1
2760 	};
2761 
2762 	struct test_data_params test_data = {
2763 		.compress_state = RTE_COMP_OP_STATELESS,
2764 		.decompress_state = RTE_COMP_OP_STATELESS,
2765 		.buff_type = LB_BOTH,
2766 		.zlib_dir = ZLIB_DECOMPRESS,
2767 		.out_of_space = 1,  /* run out-of-space test */
2768 		.big_data = 0,
2769 		.overflow = OVERFLOW_DISABLED,
2770 		.ratio = RATIO_ENABLED
2771 	};
2772 	/* Compress with compressdev, decompress with Zlib */
2773 	test_data.zlib_dir = ZLIB_DECOMPRESS;
2774 	ret = test_deflate_comp_decomp(&int_data, &test_data);
2775 	if (ret < 0)
2776 		goto exit;
2777 
2778 	/* Compress with Zlib, decompress with compressdev */
2779 	test_data.zlib_dir = ZLIB_COMPRESS;
2780 	ret = test_deflate_comp_decomp(&int_data, &test_data);
2781 	if (ret < 0)
2782 		goto exit;
2783 
2784 	if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2785 		/* Compress with compressdev, decompress with Zlib */
2786 		test_data.zlib_dir = ZLIB_DECOMPRESS;
2787 		test_data.buff_type = SGL_BOTH;
2788 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2789 		if (ret < 0)
2790 			goto exit;
2791 
2792 		/* Compress with Zlib, decompress with compressdev */
2793 		test_data.zlib_dir = ZLIB_COMPRESS;
2794 		test_data.buff_type = SGL_BOTH;
2795 		ret = test_deflate_comp_decomp(&int_data, &test_data);
2796 		if (ret < 0)
2797 			goto exit;
2798 	}
2799 
2800 	ret  = TEST_SUCCESS;
2801 
2802 exit:
2803 	return ret;
2804 }
2805 
2806 static int
2807 test_compressdev_deflate_stateless_dynamic_big(void)
2808 {
2809 	struct comp_testsuite_params *ts_params = &testsuite_params;
2810 	uint16_t i = 0;
2811 	int ret;
2812 	unsigned int j;
2813 	const struct rte_compressdev_capabilities *capab;
2814 	char *test_buffer = NULL;
2815 
2816 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2817 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2818 
2819 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
2820 		return -ENOTSUP;
2821 
2822 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
2823 		return -ENOTSUP;
2824 
2825 	test_buffer = rte_malloc(NULL, BIG_DATA_TEST_SIZE, 0);
2826 	if (test_buffer == NULL) {
2827 		RTE_LOG(ERR, USER1,
2828 			"Can't allocate buffer for big-data\n");
2829 		return TEST_FAILED;
2830 	}
2831 
2832 	struct interim_data_params int_data = {
2833 		(const char * const *)&test_buffer,
2834 		1,
2835 		&i,
2836 		&ts_params->def_comp_xform,
2837 		&ts_params->def_decomp_xform,
2838 		1
2839 	};
2840 
2841 	struct test_data_params test_data = {
2842 		.compress_state = RTE_COMP_OP_STATELESS,
2843 		.decompress_state = RTE_COMP_OP_STATELESS,
2844 		.buff_type = SGL_BOTH,
2845 		.zlib_dir = ZLIB_DECOMPRESS,
2846 		.out_of_space = 0,
2847 		.big_data = 1,
2848 		.overflow = OVERFLOW_DISABLED,
2849 		.ratio = RATIO_DISABLED
2850 	};
2851 
2852 	ts_params->def_comp_xform->compress.deflate.huffman =
2853 						RTE_COMP_HUFFMAN_DYNAMIC;
2854 
2855 	/* fill the buffer with data based on rand. data */
2856 	srand(BIG_DATA_TEST_SIZE);
2857 	for (j = 0; j < BIG_DATA_TEST_SIZE - 1; ++j)
2858 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
2859 	test_buffer[BIG_DATA_TEST_SIZE - 1] = 0;
2860 
2861 	/* Compress with compressdev, decompress with Zlib */
2862 	test_data.zlib_dir = ZLIB_DECOMPRESS;
2863 	ret = test_deflate_comp_decomp(&int_data, &test_data);
2864 	if (ret < 0)
2865 		goto exit;
2866 
2867 	/* Compress with Zlib, decompress with compressdev */
2868 	test_data.zlib_dir = ZLIB_COMPRESS;
2869 	ret = test_deflate_comp_decomp(&int_data, &test_data);
2870 	if (ret < 0)
2871 		goto exit;
2872 
2873 	ret = TEST_SUCCESS;
2874 
2875 exit:
2876 	ts_params->def_comp_xform->compress.deflate.huffman =
2877 						RTE_COMP_HUFFMAN_DEFAULT;
2878 	rte_free(test_buffer);
2879 	return ret;
2880 }
2881 
2882 static int
2883 test_compressdev_deflate_stateful_decomp(void)
2884 {
2885 	struct comp_testsuite_params *ts_params = &testsuite_params;
2886 	int ret;
2887 	uint16_t i;
2888 	const struct rte_compressdev_capabilities *capab;
2889 
2890 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2891 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2892 
2893 	if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2894 		return -ENOTSUP;
2895 
2896 	struct interim_data_params int_data = {
2897 		&compress_test_bufs[0],
2898 		1,
2899 		&i,
2900 		&ts_params->def_comp_xform,
2901 		&ts_params->def_decomp_xform,
2902 		1
2903 	};
2904 
2905 	struct test_data_params test_data = {
2906 		.compress_state = RTE_COMP_OP_STATELESS,
2907 		.decompress_state = RTE_COMP_OP_STATEFUL,
2908 		.buff_type = LB_BOTH,
2909 		.zlib_dir = ZLIB_COMPRESS,
2910 		.out_of_space = 0,
2911 		.big_data = 0,
2912 		.decompress_output_block_size = 2000,
2913 		.decompress_steps_max = 4,
2914 		.overflow = OVERFLOW_DISABLED,
2915 		.ratio = RATIO_ENABLED
2916 	};
2917 
2918 	/* Compress with Zlib, decompress with compressdev */
2919 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
2920 		ret = TEST_FAILED;
2921 		goto exit;
2922 	}
2923 
2924 	if (capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
2925 		/* Now test with SGL buffers */
2926 		test_data.buff_type = SGL_BOTH;
2927 		if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
2928 			ret = TEST_FAILED;
2929 			goto exit;
2930 		}
2931 	}
2932 
2933 	ret  = TEST_SUCCESS;
2934 
2935 exit:
2936 	return ret;
2937 }
2938 
2939 static int
2940 test_compressdev_deflate_stateful_decomp_checksum(void)
2941 {
2942 	struct comp_testsuite_params *ts_params = &testsuite_params;
2943 	int ret;
2944 	uint16_t i;
2945 	const struct rte_compressdev_capabilities *capab;
2946 
2947 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
2948 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
2949 
2950 	if (!(capab->comp_feature_flags & RTE_COMP_FF_STATEFUL_DECOMPRESSION))
2951 		return -ENOTSUP;
2952 
2953 	/* Check if driver supports any checksum */
2954 	if (!(capab->comp_feature_flags &
2955 	     (RTE_COMP_FF_CRC32_CHECKSUM | RTE_COMP_FF_ADLER32_CHECKSUM |
2956 	      RTE_COMP_FF_CRC32_ADLER32_CHECKSUM)))
2957 		return -ENOTSUP;
2958 
2959 	struct rte_comp_xform *compress_xform =
2960 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2961 	if (compress_xform == NULL) {
2962 		RTE_LOG(ERR, USER1, "Compress xform could not be created\n");
2963 		return TEST_FAILED;
2964 	}
2965 
2966 	memcpy(compress_xform, ts_params->def_comp_xform,
2967 	       sizeof(struct rte_comp_xform));
2968 
2969 	struct rte_comp_xform *decompress_xform =
2970 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
2971 	if (decompress_xform == NULL) {
2972 		RTE_LOG(ERR, USER1, "Decompress xform could not be created\n");
2973 		rte_free(compress_xform);
2974 		return TEST_FAILED;
2975 	}
2976 
2977 	memcpy(decompress_xform, ts_params->def_decomp_xform,
2978 	       sizeof(struct rte_comp_xform));
2979 
2980 	struct interim_data_params int_data = {
2981 		&compress_test_bufs[0],
2982 		1,
2983 		&i,
2984 		&compress_xform,
2985 		&decompress_xform,
2986 		1
2987 	};
2988 
2989 	struct test_data_params test_data = {
2990 		.compress_state = RTE_COMP_OP_STATELESS,
2991 		.decompress_state = RTE_COMP_OP_STATEFUL,
2992 		.buff_type = LB_BOTH,
2993 		.zlib_dir = ZLIB_COMPRESS,
2994 		.out_of_space = 0,
2995 		.big_data = 0,
2996 		.decompress_output_block_size = 2000,
2997 		.decompress_steps_max = 4,
2998 		.overflow = OVERFLOW_DISABLED,
2999 		.ratio = RATIO_ENABLED
3000 	};
3001 
3002 	/* Check if driver supports crc32 checksum and test */
3003 	if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_CHECKSUM) {
3004 		compress_xform->compress.chksum = RTE_COMP_CHECKSUM_CRC32;
3005 		decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_CRC32;
3006 		/* Compress with Zlib, decompress with compressdev */
3007 		test_data.buff_type = LB_BOTH;
3008 		if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3009 			ret = TEST_FAILED;
3010 			goto exit;
3011 		}
3012 		if (capab->comp_feature_flags &
3013 				RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3014 			/* Now test with SGL buffers */
3015 			test_data.buff_type = SGL_BOTH;
3016 			if (test_deflate_comp_decomp(&int_data,
3017 						     &test_data) < 0) {
3018 				ret = TEST_FAILED;
3019 				goto exit;
3020 			}
3021 		}
3022 	}
3023 
3024 	/* Check if driver supports adler32 checksum and test */
3025 	if (capab->comp_feature_flags & RTE_COMP_FF_ADLER32_CHECKSUM) {
3026 		compress_xform->compress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3027 		decompress_xform->decompress.chksum = RTE_COMP_CHECKSUM_ADLER32;
3028 		/* Compress with Zlib, decompress with compressdev */
3029 		test_data.buff_type = LB_BOTH;
3030 		if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3031 			ret = TEST_FAILED;
3032 			goto exit;
3033 		}
3034 		if (capab->comp_feature_flags &
3035 				RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3036 			/* Now test with SGL buffers */
3037 			test_data.buff_type = SGL_BOTH;
3038 			if (test_deflate_comp_decomp(&int_data,
3039 						     &test_data) < 0) {
3040 				ret = TEST_FAILED;
3041 				goto exit;
3042 			}
3043 		}
3044 	}
3045 
3046 	/* Check if driver supports combined crc and adler checksum and test */
3047 	if (capab->comp_feature_flags & RTE_COMP_FF_CRC32_ADLER32_CHECKSUM) {
3048 		compress_xform->compress.chksum =
3049 				RTE_COMP_CHECKSUM_CRC32_ADLER32;
3050 		decompress_xform->decompress.chksum =
3051 				RTE_COMP_CHECKSUM_CRC32_ADLER32;
3052 		/* Zlib doesn't support combined checksum */
3053 		test_data.zlib_dir = ZLIB_NONE;
3054 		/* Compress stateless, decompress stateful with compressdev */
3055 		test_data.buff_type = LB_BOTH;
3056 		if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3057 			ret = TEST_FAILED;
3058 			goto exit;
3059 		}
3060 		if (capab->comp_feature_flags &
3061 				RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) {
3062 			/* Now test with SGL buffers */
3063 			test_data.buff_type = SGL_BOTH;
3064 			if (test_deflate_comp_decomp(&int_data,
3065 						     &test_data) < 0) {
3066 				ret = TEST_FAILED;
3067 				goto exit;
3068 			}
3069 		}
3070 	}
3071 
3072 	ret  = TEST_SUCCESS;
3073 
3074 exit:
3075 	rte_free(compress_xform);
3076 	rte_free(decompress_xform);
3077 	return ret;
3078 }
3079 
3080 static const struct rte_memzone *
3081 make_memzone(const char *name, size_t size)
3082 {
3083 	unsigned int socket_id = rte_socket_id();
3084 	char mz_name[RTE_MEMZONE_NAMESIZE];
3085 	const struct rte_memzone *memzone;
3086 
3087 	snprintf(mz_name, RTE_MEMZONE_NAMESIZE, "%s_%u", name, socket_id);
3088 	memzone = rte_memzone_lookup(mz_name);
3089 	if (memzone != NULL && memzone->len != size) {
3090 		rte_memzone_free(memzone);
3091 		memzone = NULL;
3092 	}
3093 	if (memzone == NULL) {
3094 		memzone = rte_memzone_reserve_aligned(mz_name, size, socket_id,
3095 				RTE_MEMZONE_IOVA_CONTIG, RTE_CACHE_LINE_SIZE);
3096 		if (memzone == NULL)
3097 			RTE_LOG(ERR, USER1, "Can't allocate memory zone %s",
3098 				mz_name);
3099 	}
3100 	return memzone;
3101 }
3102 
3103 static int
3104 test_compressdev_external_mbufs(void)
3105 {
3106 	struct comp_testsuite_params *ts_params = &testsuite_params;
3107 	size_t data_len = 0;
3108 	uint16_t i;
3109 	int ret = TEST_FAILED;
3110 
3111 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++)
3112 		data_len = RTE_MAX(data_len, strlen(compress_test_bufs[i]) + 1);
3113 
3114 	struct interim_data_params int_data = {
3115 		NULL,
3116 		1,
3117 		NULL,
3118 		&ts_params->def_comp_xform,
3119 		&ts_params->def_decomp_xform,
3120 		1
3121 	};
3122 
3123 	struct test_data_params test_data = {
3124 		.compress_state = RTE_COMP_OP_STATELESS,
3125 		.decompress_state = RTE_COMP_OP_STATELESS,
3126 		.buff_type = LB_BOTH,
3127 		.zlib_dir = ZLIB_DECOMPRESS,
3128 		.out_of_space = 0,
3129 		.big_data = 0,
3130 		.use_external_mbufs = 1,
3131 		.inbuf_data_size = data_len,
3132 		.inbuf_memzone = make_memzone("inbuf", data_len),
3133 		.compbuf_memzone = make_memzone("compbuf", data_len *
3134 						COMPRESS_BUF_SIZE_RATIO),
3135 		.uncompbuf_memzone = make_memzone("decompbuf", data_len),
3136 		.overflow = OVERFLOW_DISABLED
3137 	};
3138 
3139 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3140 		/* prepare input data */
3141 		data_len = strlen(compress_test_bufs[i]) + 1;
3142 		rte_memcpy(test_data.inbuf_memzone->addr, compress_test_bufs[i],
3143 			   data_len);
3144 		test_data.inbuf_data_size = data_len;
3145 		int_data.buf_idx = &i;
3146 
3147 		/* Compress with compressdev, decompress with Zlib */
3148 		test_data.zlib_dir = ZLIB_DECOMPRESS;
3149 		if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3150 			goto exit;
3151 
3152 		/* Compress with Zlib, decompress with compressdev */
3153 		test_data.zlib_dir = ZLIB_COMPRESS;
3154 		if (test_deflate_comp_decomp(&int_data, &test_data) < 0)
3155 			goto exit;
3156 	}
3157 
3158 	ret = TEST_SUCCESS;
3159 
3160 exit:
3161 	rte_memzone_free(test_data.inbuf_memzone);
3162 	rte_memzone_free(test_data.compbuf_memzone);
3163 	rte_memzone_free(test_data.uncompbuf_memzone);
3164 	return ret;
3165 }
3166 
3167 static int
3168 test_compressdev_deflate_stateless_fixed_oos_recoverable(void)
3169 {
3170 	struct comp_testsuite_params *ts_params = &testsuite_params;
3171 	uint16_t i;
3172 	int ret;
3173 	int comp_result;
3174 	const struct rte_compressdev_capabilities *capab;
3175 
3176 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3177 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3178 
3179 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_FIXED) == 0)
3180 		return -ENOTSUP;
3181 
3182 	struct rte_comp_xform *compress_xform =
3183 			rte_malloc(NULL, sizeof(struct rte_comp_xform), 0);
3184 
3185 	if (compress_xform == NULL) {
3186 		RTE_LOG(ERR, USER1,
3187 			"Compress xform could not be created\n");
3188 		ret = TEST_FAILED;
3189 		goto exit;
3190 	}
3191 
3192 	memcpy(compress_xform, ts_params->def_comp_xform,
3193 			sizeof(struct rte_comp_xform));
3194 	compress_xform->compress.deflate.huffman = RTE_COMP_HUFFMAN_FIXED;
3195 
3196 	struct interim_data_params int_data = {
3197 		NULL,
3198 		1,
3199 		NULL,
3200 		&compress_xform,
3201 		&ts_params->def_decomp_xform,
3202 		1
3203 	};
3204 
3205 	struct test_data_params test_data = {
3206 		.compress_state = RTE_COMP_OP_STATELESS,
3207 		.decompress_state = RTE_COMP_OP_STATELESS,
3208 		.buff_type = LB_BOTH,
3209 		.zlib_dir = ZLIB_DECOMPRESS,
3210 		.out_of_space = 0,
3211 		.big_data = 0,
3212 		.overflow = OVERFLOW_ENABLED,
3213 		.ratio = RATIO_ENABLED
3214 	};
3215 
3216 	for (i = 0; i < RTE_DIM(compress_test_bufs); i++) {
3217 		int_data.test_bufs = &compress_test_bufs[i];
3218 		int_data.buf_idx = &i;
3219 
3220 		/* Compress with compressdev, decompress with Zlib */
3221 		test_data.zlib_dir = ZLIB_DECOMPRESS;
3222 		comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3223 		if (comp_result < 0) {
3224 			ret = TEST_FAILED;
3225 			goto exit;
3226 		} else if (comp_result > 0) {
3227 			ret = -ENOTSUP;
3228 			goto exit;
3229 		}
3230 
3231 		/* Compress with Zlib, decompress with compressdev */
3232 		test_data.zlib_dir = ZLIB_COMPRESS;
3233 		comp_result = test_deflate_comp_decomp(&int_data, &test_data);
3234 		if (comp_result < 0) {
3235 			ret = TEST_FAILED;
3236 			goto exit;
3237 		} else if (comp_result > 0) {
3238 			ret = -ENOTSUP;
3239 			goto exit;
3240 		}
3241 	}
3242 
3243 	ret = TEST_SUCCESS;
3244 
3245 exit:
3246 	rte_free(compress_xform);
3247 	return ret;
3248 }
3249 
3250 static int
3251 test_compressdev_deflate_im_buffers_LB_1op(void)
3252 {
3253 	struct comp_testsuite_params *ts_params = &testsuite_params;
3254 	uint16_t i = 0;
3255 	int ret = TEST_SUCCESS;
3256 	int j;
3257 	const struct rte_compressdev_capabilities *capab;
3258 	char *test_buffer = NULL;
3259 
3260 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3261 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3262 
3263 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3264 		return -ENOTSUP;
3265 
3266 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3267 		return -ENOTSUP;
3268 
3269 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3270 	if (test_buffer == NULL) {
3271 		RTE_LOG(ERR, USER1,
3272 			"Can't allocate buffer for 'im buffer' test\n");
3273 		return TEST_FAILED;
3274 	}
3275 
3276 	struct interim_data_params int_data = {
3277 		(const char * const *)&test_buffer,
3278 		1,
3279 		&i,
3280 		&ts_params->def_comp_xform,
3281 		&ts_params->def_decomp_xform,
3282 		1
3283 	};
3284 
3285 	struct test_data_params test_data = {
3286 		.compress_state = RTE_COMP_OP_STATELESS,
3287 		.decompress_state = RTE_COMP_OP_STATELESS,
3288 				/* must be LB to SGL,
3289 				 * input LB buffer reaches its maximum,
3290 				 * if ratio 1.3 than another mbuf must be
3291 				 * created and attached
3292 				 */
3293 		.buff_type = LB_BOTH,
3294 		.zlib_dir = ZLIB_NONE,
3295 		.out_of_space = 0,
3296 		.big_data = 1,
3297 		.overflow = OVERFLOW_DISABLED,
3298 		.ratio = RATIO_DISABLED
3299 	};
3300 
3301 	ts_params->def_comp_xform->compress.deflate.huffman =
3302 			RTE_COMP_HUFFMAN_DYNAMIC;
3303 
3304 	/* fill the buffer with data based on rand. data */
3305 	srand(IM_BUF_DATA_TEST_SIZE_LB);
3306 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3307 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3308 
3309 	/* Compress with compressdev, decompress with compressdev */
3310 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3311 		ret = TEST_FAILED;
3312 		goto end;
3313 	}
3314 
3315 end:
3316 	ts_params->def_comp_xform->compress.deflate.huffman =
3317 			RTE_COMP_HUFFMAN_DEFAULT;
3318 	rte_free(test_buffer);
3319 	return ret;
3320 }
3321 
3322 static int
3323 test_compressdev_deflate_im_buffers_LB_2ops_first(void)
3324 {
3325 	struct comp_testsuite_params *ts_params = &testsuite_params;
3326 	uint16_t i = 0;
3327 	int ret = TEST_SUCCESS;
3328 	int j;
3329 	const struct rte_compressdev_capabilities *capab;
3330 	char *test_buffer = NULL;
3331 	const char *test_buffers[2];
3332 
3333 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3334 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3335 
3336 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3337 		return -ENOTSUP;
3338 
3339 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3340 		return -ENOTSUP;
3341 
3342 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3343 	if (test_buffer == NULL) {
3344 		RTE_LOG(ERR, USER1,
3345 			"Can't allocate buffer for 'im buffer' test\n");
3346 		return TEST_FAILED;
3347 	}
3348 
3349 	test_buffers[0] = test_buffer;
3350 	test_buffers[1] = compress_test_bufs[0];
3351 
3352 	struct interim_data_params int_data = {
3353 		(const char * const *)test_buffers,
3354 		2,
3355 		&i,
3356 		&ts_params->def_comp_xform,
3357 		&ts_params->def_decomp_xform,
3358 		1
3359 	};
3360 
3361 	struct test_data_params test_data = {
3362 		.compress_state = RTE_COMP_OP_STATELESS,
3363 		.decompress_state = RTE_COMP_OP_STATELESS,
3364 		.buff_type = LB_BOTH,
3365 		.zlib_dir = ZLIB_NONE,
3366 		.out_of_space = 0,
3367 		.big_data = 1,
3368 		.overflow = OVERFLOW_DISABLED,
3369 		.ratio = RATIO_DISABLED
3370 	};
3371 
3372 	ts_params->def_comp_xform->compress.deflate.huffman =
3373 			RTE_COMP_HUFFMAN_DYNAMIC;
3374 
3375 	/* fill the buffer with data based on rand. data */
3376 	srand(IM_BUF_DATA_TEST_SIZE_LB);
3377 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3378 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3379 
3380 	/* Compress with compressdev, decompress with compressdev */
3381 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3382 		ret = TEST_FAILED;
3383 		goto end;
3384 	}
3385 
3386 end:
3387 	ts_params->def_comp_xform->compress.deflate.huffman =
3388 			RTE_COMP_HUFFMAN_DEFAULT;
3389 	rte_free(test_buffer);
3390 	return ret;
3391 }
3392 
3393 static int
3394 test_compressdev_deflate_im_buffers_LB_2ops_second(void)
3395 {
3396 	struct comp_testsuite_params *ts_params = &testsuite_params;
3397 	uint16_t i = 0;
3398 	int ret = TEST_SUCCESS;
3399 	int j;
3400 	const struct rte_compressdev_capabilities *capab;
3401 	char *test_buffer = NULL;
3402 	const char *test_buffers[2];
3403 
3404 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3405 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3406 
3407 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3408 		return -ENOTSUP;
3409 
3410 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3411 		return -ENOTSUP;
3412 
3413 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3414 	if (test_buffer == NULL) {
3415 		RTE_LOG(ERR, USER1,
3416 			"Can't allocate buffer for 'im buffer' test\n");
3417 		return TEST_FAILED;
3418 	}
3419 
3420 	test_buffers[0] = compress_test_bufs[0];
3421 	test_buffers[1] = test_buffer;
3422 
3423 	struct interim_data_params int_data = {
3424 		(const char * const *)test_buffers,
3425 		2,
3426 		&i,
3427 		&ts_params->def_comp_xform,
3428 		&ts_params->def_decomp_xform,
3429 		1
3430 	};
3431 
3432 	struct test_data_params test_data = {
3433 		.compress_state = RTE_COMP_OP_STATELESS,
3434 		.decompress_state = RTE_COMP_OP_STATELESS,
3435 		.buff_type = LB_BOTH,
3436 		.zlib_dir = ZLIB_NONE,
3437 		.out_of_space = 0,
3438 		.big_data = 1,
3439 		.overflow = OVERFLOW_DISABLED,
3440 		.ratio = RATIO_DISABLED
3441 	};
3442 
3443 	ts_params->def_comp_xform->compress.deflate.huffman =
3444 			RTE_COMP_HUFFMAN_DYNAMIC;
3445 
3446 	/* fill the buffer with data based on rand. data */
3447 	srand(IM_BUF_DATA_TEST_SIZE_LB);
3448 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3449 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3450 
3451 	/* Compress with compressdev, decompress with compressdev */
3452 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3453 		ret = TEST_FAILED;
3454 		goto end;
3455 	}
3456 
3457 end:
3458 	ts_params->def_comp_xform->compress.deflate.huffman =
3459 			RTE_COMP_HUFFMAN_DEFAULT;
3460 	rte_free(test_buffer);
3461 	return ret;
3462 }
3463 
3464 static int
3465 test_compressdev_deflate_im_buffers_LB_3ops(void)
3466 {
3467 	struct comp_testsuite_params *ts_params = &testsuite_params;
3468 	uint16_t i = 0;
3469 	int ret = TEST_SUCCESS;
3470 	int j;
3471 	const struct rte_compressdev_capabilities *capab;
3472 	char *test_buffer = NULL;
3473 	const char *test_buffers[3];
3474 
3475 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3476 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3477 
3478 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3479 		return -ENOTSUP;
3480 
3481 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3482 		return -ENOTSUP;
3483 
3484 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3485 	if (test_buffer == NULL) {
3486 		RTE_LOG(ERR, USER1,
3487 			"Can't allocate buffer for 'im buffer' test\n");
3488 		return TEST_FAILED;
3489 	}
3490 
3491 	test_buffers[0] = compress_test_bufs[0];
3492 	test_buffers[1] = test_buffer;
3493 	test_buffers[2] = compress_test_bufs[1];
3494 
3495 	struct interim_data_params int_data = {
3496 		(const char * const *)test_buffers,
3497 		3,
3498 		&i,
3499 		&ts_params->def_comp_xform,
3500 		&ts_params->def_decomp_xform,
3501 		1
3502 	};
3503 
3504 	struct test_data_params test_data = {
3505 		.compress_state = RTE_COMP_OP_STATELESS,
3506 		.decompress_state = RTE_COMP_OP_STATELESS,
3507 		.buff_type = LB_BOTH,
3508 		.zlib_dir = ZLIB_NONE,
3509 		.out_of_space = 0,
3510 		.big_data = 1,
3511 		.overflow = OVERFLOW_DISABLED,
3512 		.ratio = RATIO_DISABLED
3513 	};
3514 
3515 	ts_params->def_comp_xform->compress.deflate.huffman =
3516 			RTE_COMP_HUFFMAN_DYNAMIC;
3517 
3518 	/* fill the buffer with data based on rand. data */
3519 	srand(IM_BUF_DATA_TEST_SIZE_LB);
3520 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3521 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3522 
3523 	/* Compress with compressdev, decompress with compressdev */
3524 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3525 		ret = TEST_FAILED;
3526 		goto end;
3527 	}
3528 
3529 end:
3530 	ts_params->def_comp_xform->compress.deflate.huffman =
3531 			RTE_COMP_HUFFMAN_DEFAULT;
3532 	rte_free(test_buffer);
3533 	return ret;
3534 }
3535 
3536 static int
3537 test_compressdev_deflate_im_buffers_LB_4ops(void)
3538 {
3539 	struct comp_testsuite_params *ts_params = &testsuite_params;
3540 	uint16_t i = 0;
3541 	int ret = TEST_SUCCESS;
3542 	int j;
3543 	const struct rte_compressdev_capabilities *capab;
3544 	char *test_buffer = NULL;
3545 	const char *test_buffers[4];
3546 
3547 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3548 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3549 
3550 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3551 		return -ENOTSUP;
3552 
3553 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3554 		return -ENOTSUP;
3555 
3556 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_LB, 0);
3557 	if (test_buffer == NULL) {
3558 		RTE_LOG(ERR, USER1,
3559 			"Can't allocate buffer for 'im buffer' test\n");
3560 		return TEST_FAILED;
3561 	}
3562 
3563 	test_buffers[0] = compress_test_bufs[0];
3564 	test_buffers[1] = test_buffer;
3565 	test_buffers[2] = compress_test_bufs[1];
3566 	test_buffers[3] = test_buffer;
3567 
3568 	struct interim_data_params int_data = {
3569 		(const char * const *)test_buffers,
3570 		4,
3571 		&i,
3572 		&ts_params->def_comp_xform,
3573 		&ts_params->def_decomp_xform,
3574 		1
3575 	};
3576 
3577 	struct test_data_params test_data = {
3578 		.compress_state = RTE_COMP_OP_STATELESS,
3579 		.decompress_state = RTE_COMP_OP_STATELESS,
3580 		.buff_type = LB_BOTH,
3581 		.zlib_dir = ZLIB_NONE,
3582 		.out_of_space = 0,
3583 		.big_data = 1,
3584 		.overflow = OVERFLOW_DISABLED,
3585 		.ratio = RATIO_DISABLED
3586 	};
3587 
3588 	ts_params->def_comp_xform->compress.deflate.huffman =
3589 			RTE_COMP_HUFFMAN_DYNAMIC;
3590 
3591 	/* fill the buffer with data based on rand. data */
3592 	srand(IM_BUF_DATA_TEST_SIZE_LB);
3593 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_LB - 1; ++j)
3594 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3595 
3596 	/* Compress with compressdev, decompress with compressdev */
3597 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3598 		ret = TEST_FAILED;
3599 		goto end;
3600 	}
3601 
3602 end:
3603 	ts_params->def_comp_xform->compress.deflate.huffman =
3604 			RTE_COMP_HUFFMAN_DEFAULT;
3605 	rte_free(test_buffer);
3606 	return ret;
3607 }
3608 
3609 
3610 static int
3611 test_compressdev_deflate_im_buffers_SGL_1op(void)
3612 {
3613 	struct comp_testsuite_params *ts_params = &testsuite_params;
3614 	uint16_t i = 0;
3615 	int ret = TEST_SUCCESS;
3616 	int j;
3617 	const struct rte_compressdev_capabilities *capab;
3618 	char *test_buffer = NULL;
3619 
3620 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3621 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3622 
3623 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3624 		return -ENOTSUP;
3625 
3626 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3627 		return -ENOTSUP;
3628 
3629 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3630 	if (test_buffer == NULL) {
3631 		RTE_LOG(ERR, USER1,
3632 			"Can't allocate buffer for big-data\n");
3633 		return TEST_FAILED;
3634 	}
3635 
3636 	struct interim_data_params int_data = {
3637 		(const char * const *)&test_buffer,
3638 		1,
3639 		&i,
3640 		&ts_params->def_comp_xform,
3641 		&ts_params->def_decomp_xform,
3642 		1
3643 	};
3644 
3645 	struct test_data_params test_data = {
3646 		.compress_state = RTE_COMP_OP_STATELESS,
3647 		.decompress_state = RTE_COMP_OP_STATELESS,
3648 		.buff_type = SGL_BOTH,
3649 		.zlib_dir = ZLIB_NONE,
3650 		.out_of_space = 0,
3651 		.big_data = 1,
3652 		.overflow = OVERFLOW_DISABLED,
3653 		.ratio = RATIO_DISABLED
3654 	};
3655 
3656 	ts_params->def_comp_xform->compress.deflate.huffman =
3657 			RTE_COMP_HUFFMAN_DYNAMIC;
3658 
3659 	/* fill the buffer with data based on rand. data */
3660 	srand(IM_BUF_DATA_TEST_SIZE_SGL);
3661 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3662 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3663 
3664 	/* Compress with compressdev, decompress with compressdev */
3665 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3666 		ret = TEST_FAILED;
3667 		goto end;
3668 	}
3669 
3670 end:
3671 	ts_params->def_comp_xform->compress.deflate.huffman =
3672 			RTE_COMP_HUFFMAN_DEFAULT;
3673 	rte_free(test_buffer);
3674 	return ret;
3675 }
3676 
3677 static int
3678 test_compressdev_deflate_im_buffers_SGL_2ops_first(void)
3679 {
3680 	struct comp_testsuite_params *ts_params = &testsuite_params;
3681 	uint16_t i = 0;
3682 	int ret = TEST_SUCCESS;
3683 	int j;
3684 	const struct rte_compressdev_capabilities *capab;
3685 	char *test_buffer = NULL;
3686 	const char *test_buffers[2];
3687 
3688 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3689 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3690 
3691 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3692 		return -ENOTSUP;
3693 
3694 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3695 		return -ENOTSUP;
3696 
3697 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3698 	if (test_buffer == NULL) {
3699 		RTE_LOG(ERR, USER1,
3700 			"Can't allocate buffer for big-data\n");
3701 		return TEST_FAILED;
3702 	}
3703 
3704 	test_buffers[0] = test_buffer;
3705 	test_buffers[1] = compress_test_bufs[0];
3706 
3707 	struct interim_data_params int_data = {
3708 		(const char * const *)test_buffers,
3709 		2,
3710 		&i,
3711 		&ts_params->def_comp_xform,
3712 		&ts_params->def_decomp_xform,
3713 		1
3714 	};
3715 
3716 	struct test_data_params test_data = {
3717 		.compress_state = RTE_COMP_OP_STATELESS,
3718 		.decompress_state = RTE_COMP_OP_STATELESS,
3719 		.buff_type = SGL_BOTH,
3720 		.zlib_dir = ZLIB_NONE,
3721 		.out_of_space = 0,
3722 		.big_data = 1,
3723 		.overflow = OVERFLOW_DISABLED,
3724 		.ratio = RATIO_DISABLED
3725 	};
3726 
3727 	ts_params->def_comp_xform->compress.deflate.huffman =
3728 			RTE_COMP_HUFFMAN_DYNAMIC;
3729 
3730 	/* fill the buffer with data based on rand. data */
3731 	srand(IM_BUF_DATA_TEST_SIZE_SGL);
3732 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3733 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3734 
3735 	/* Compress with compressdev, decompress with compressdev */
3736 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3737 		ret = TEST_FAILED;
3738 		goto end;
3739 	}
3740 
3741 end:
3742 	ts_params->def_comp_xform->compress.deflate.huffman =
3743 			RTE_COMP_HUFFMAN_DEFAULT;
3744 	rte_free(test_buffer);
3745 	return ret;
3746 }
3747 
3748 static int
3749 test_compressdev_deflate_im_buffers_SGL_2ops_second(void)
3750 {
3751 	struct comp_testsuite_params *ts_params = &testsuite_params;
3752 	uint16_t i = 0;
3753 	int ret = TEST_SUCCESS;
3754 	int j;
3755 	const struct rte_compressdev_capabilities *capab;
3756 	char *test_buffer = NULL;
3757 	const char *test_buffers[2];
3758 
3759 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3760 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3761 
3762 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3763 		return -ENOTSUP;
3764 
3765 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3766 		return -ENOTSUP;
3767 
3768 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3769 	if (test_buffer == NULL) {
3770 		RTE_LOG(ERR, USER1,
3771 			"Can't allocate buffer for big-data\n");
3772 		return TEST_FAILED;
3773 	}
3774 
3775 	test_buffers[0] = compress_test_bufs[0];
3776 	test_buffers[1] = test_buffer;
3777 
3778 	struct interim_data_params int_data = {
3779 		(const char * const *)test_buffers,
3780 		2,
3781 		&i,
3782 		&ts_params->def_comp_xform,
3783 		&ts_params->def_decomp_xform,
3784 		1
3785 	};
3786 
3787 	struct test_data_params test_data = {
3788 		.compress_state = RTE_COMP_OP_STATELESS,
3789 		.decompress_state = RTE_COMP_OP_STATELESS,
3790 		.buff_type = SGL_BOTH,
3791 		.zlib_dir = ZLIB_NONE,
3792 		.out_of_space = 0,
3793 		.big_data = 1,
3794 		.overflow = OVERFLOW_DISABLED,
3795 		.ratio = RATIO_DISABLED
3796 	};
3797 
3798 	ts_params->def_comp_xform->compress.deflate.huffman =
3799 			RTE_COMP_HUFFMAN_DYNAMIC;
3800 
3801 	/* fill the buffer with data based on rand. data */
3802 	srand(IM_BUF_DATA_TEST_SIZE_SGL);
3803 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3804 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3805 
3806 	/* Compress with compressdev, decompress with compressdev */
3807 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3808 		ret = TEST_FAILED;
3809 		goto end;
3810 	}
3811 
3812 end:
3813 	ts_params->def_comp_xform->compress.deflate.huffman =
3814 			RTE_COMP_HUFFMAN_DEFAULT;
3815 	rte_free(test_buffer);
3816 	return ret;
3817 }
3818 
3819 static int
3820 test_compressdev_deflate_im_buffers_SGL_3ops(void)
3821 {
3822 	struct comp_testsuite_params *ts_params = &testsuite_params;
3823 	uint16_t i = 0;
3824 	int ret = TEST_SUCCESS;
3825 	int j;
3826 	const struct rte_compressdev_capabilities *capab;
3827 	char *test_buffer = NULL;
3828 	const char *test_buffers[3];
3829 
3830 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3831 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3832 
3833 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3834 		return -ENOTSUP;
3835 
3836 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3837 		return -ENOTSUP;
3838 
3839 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3840 	if (test_buffer == NULL) {
3841 		RTE_LOG(ERR, USER1,
3842 			"Can't allocate buffer for big-data\n");
3843 		return TEST_FAILED;
3844 	}
3845 
3846 	test_buffers[0] = compress_test_bufs[0];
3847 	test_buffers[1] = test_buffer;
3848 	test_buffers[2] = compress_test_bufs[1];
3849 
3850 	struct interim_data_params int_data = {
3851 		(const char * const *)test_buffers,
3852 		3,
3853 		&i,
3854 		&ts_params->def_comp_xform,
3855 		&ts_params->def_decomp_xform,
3856 		1
3857 	};
3858 
3859 	struct test_data_params test_data = {
3860 		.compress_state = RTE_COMP_OP_STATELESS,
3861 		.decompress_state = RTE_COMP_OP_STATELESS,
3862 		.buff_type = SGL_BOTH,
3863 		.zlib_dir = ZLIB_NONE,
3864 		.out_of_space = 0,
3865 		.big_data = 1,
3866 		.overflow = OVERFLOW_DISABLED,
3867 		.ratio = RATIO_DISABLED
3868 	};
3869 
3870 	ts_params->def_comp_xform->compress.deflate.huffman =
3871 			RTE_COMP_HUFFMAN_DYNAMIC;
3872 
3873 	/* fill the buffer with data based on rand. data */
3874 	srand(IM_BUF_DATA_TEST_SIZE_SGL);
3875 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3876 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3877 
3878 	/* Compress with compressdev, decompress with compressdev */
3879 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3880 		ret = TEST_FAILED;
3881 		goto end;
3882 	}
3883 
3884 end:
3885 	ts_params->def_comp_xform->compress.deflate.huffman =
3886 			RTE_COMP_HUFFMAN_DEFAULT;
3887 	rte_free(test_buffer);
3888 	return ret;
3889 }
3890 
3891 
3892 static int
3893 test_compressdev_deflate_im_buffers_SGL_4ops(void)
3894 {
3895 	struct comp_testsuite_params *ts_params = &testsuite_params;
3896 	uint16_t i = 0;
3897 	int ret = TEST_SUCCESS;
3898 	int j;
3899 	const struct rte_compressdev_capabilities *capab;
3900 	char *test_buffer = NULL;
3901 	const char *test_buffers[4];
3902 
3903 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3904 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3905 
3906 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3907 		return -ENOTSUP;
3908 
3909 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3910 		return -ENOTSUP;
3911 
3912 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_SGL, 0);
3913 	if (test_buffer == NULL) {
3914 		RTE_LOG(ERR, USER1,
3915 			"Can't allocate buffer for big-data\n");
3916 		return TEST_FAILED;
3917 	}
3918 
3919 	test_buffers[0] = compress_test_bufs[0];
3920 	test_buffers[1] = test_buffer;
3921 	test_buffers[2] = compress_test_bufs[1];
3922 	test_buffers[3] = test_buffer;
3923 
3924 	struct interim_data_params int_data = {
3925 		(const char * const *)test_buffers,
3926 		4,
3927 		&i,
3928 		&ts_params->def_comp_xform,
3929 		&ts_params->def_decomp_xform,
3930 		1
3931 	};
3932 
3933 	struct test_data_params test_data = {
3934 		.compress_state = RTE_COMP_OP_STATELESS,
3935 		.decompress_state = RTE_COMP_OP_STATELESS,
3936 		.buff_type = SGL_BOTH,
3937 		.zlib_dir = ZLIB_NONE,
3938 		.out_of_space = 0,
3939 		.big_data = 1,
3940 		.overflow = OVERFLOW_DISABLED,
3941 		.ratio = RATIO_DISABLED
3942 	};
3943 
3944 	ts_params->def_comp_xform->compress.deflate.huffman =
3945 			RTE_COMP_HUFFMAN_DYNAMIC;
3946 
3947 	/* fill the buffer with data based on rand. data */
3948 	srand(IM_BUF_DATA_TEST_SIZE_SGL);
3949 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_SGL - 1; ++j)
3950 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
3951 
3952 	/* Compress with compressdev, decompress with compressdev */
3953 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
3954 		ret = TEST_FAILED;
3955 		goto end;
3956 	}
3957 
3958 end:
3959 	ts_params->def_comp_xform->compress.deflate.huffman =
3960 			RTE_COMP_HUFFMAN_DEFAULT;
3961 	rte_free(test_buffer);
3962 	return ret;
3963 }
3964 
3965 static int
3966 test_compressdev_deflate_im_buffers_SGL_over_1op(void)
3967 {
3968 	struct comp_testsuite_params *ts_params = &testsuite_params;
3969 	uint16_t i = 0;
3970 	int ret = TEST_SUCCESS;
3971 	int j;
3972 	const struct rte_compressdev_capabilities *capab;
3973 	char *test_buffer = NULL;
3974 
3975 	RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
3976 
3977 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
3978 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
3979 
3980 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
3981 		return -ENOTSUP;
3982 
3983 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
3984 		return -ENOTSUP;
3985 
3986 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
3987 	if (test_buffer == NULL) {
3988 		RTE_LOG(ERR, USER1,
3989 			"Can't allocate buffer for big-data\n");
3990 		return TEST_FAILED;
3991 	}
3992 
3993 	struct interim_data_params int_data = {
3994 		(const char * const *)&test_buffer,
3995 		1,
3996 		&i,
3997 		&ts_params->def_comp_xform,
3998 		&ts_params->def_decomp_xform,
3999 		1
4000 	};
4001 
4002 	struct test_data_params test_data = {
4003 		.compress_state = RTE_COMP_OP_STATELESS,
4004 		.decompress_state = RTE_COMP_OP_STATELESS,
4005 		.buff_type = SGL_BOTH,
4006 		.zlib_dir = ZLIB_NONE,
4007 		.out_of_space = 0,
4008 		.big_data = 1,
4009 		.overflow = OVERFLOW_DISABLED,
4010 		.ratio = RATIO_DISABLED
4011 	};
4012 
4013 	ts_params->def_comp_xform->compress.deflate.huffman =
4014 			RTE_COMP_HUFFMAN_DYNAMIC;
4015 
4016 	/* fill the buffer with data based on rand. data */
4017 	srand(IM_BUF_DATA_TEST_SIZE_OVER);
4018 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4019 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4020 
4021 	/* Compress with compressdev, decompress with compressdev */
4022 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4023 		ret = TEST_SUCCESS;
4024 		goto end;
4025 	}
4026 
4027 end:
4028 	ts_params->def_comp_xform->compress.deflate.huffman =
4029 			RTE_COMP_HUFFMAN_DEFAULT;
4030 	rte_free(test_buffer);
4031 
4032 	return ret;
4033 }
4034 
4035 
4036 static int
4037 test_compressdev_deflate_im_buffers_SGL_over_2ops_first(void)
4038 {
4039 	struct comp_testsuite_params *ts_params = &testsuite_params;
4040 	uint16_t i = 0;
4041 	int ret = TEST_SUCCESS;
4042 	int j;
4043 	const struct rte_compressdev_capabilities *capab;
4044 	char *test_buffer = NULL;
4045 	const char *test_buffers[2];
4046 
4047 	RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4048 
4049 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4050 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4051 
4052 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4053 		return -ENOTSUP;
4054 
4055 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4056 		return -ENOTSUP;
4057 
4058 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4059 	if (test_buffer == NULL) {
4060 		RTE_LOG(ERR, USER1,
4061 			"Can't allocate buffer for big-data\n");
4062 		return TEST_FAILED;
4063 	}
4064 
4065 	test_buffers[0] = test_buffer;
4066 	test_buffers[1] = compress_test_bufs[0];
4067 
4068 	struct interim_data_params int_data = {
4069 		(const char * const *)test_buffers,
4070 		2,
4071 		&i,
4072 		&ts_params->def_comp_xform,
4073 		&ts_params->def_decomp_xform,
4074 		1
4075 	};
4076 
4077 	struct test_data_params test_data = {
4078 		.compress_state = RTE_COMP_OP_STATELESS,
4079 		.decompress_state = RTE_COMP_OP_STATELESS,
4080 		.buff_type = SGL_BOTH,
4081 		.zlib_dir = ZLIB_NONE,
4082 		.out_of_space = 0,
4083 		.big_data = 1,
4084 		.overflow = OVERFLOW_DISABLED,
4085 		.ratio = RATIO_DISABLED
4086 	};
4087 
4088 	ts_params->def_comp_xform->compress.deflate.huffman =
4089 			RTE_COMP_HUFFMAN_DYNAMIC;
4090 
4091 	/* fill the buffer with data based on rand. data */
4092 	srand(IM_BUF_DATA_TEST_SIZE_OVER);
4093 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4094 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4095 
4096 	/* Compress with compressdev, decompress with compressdev */
4097 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4098 		ret = TEST_SUCCESS;
4099 		goto end;
4100 	}
4101 
4102 end:
4103 	ts_params->def_comp_xform->compress.deflate.huffman =
4104 			RTE_COMP_HUFFMAN_DEFAULT;
4105 	rte_free(test_buffer);
4106 	return ret;
4107 }
4108 
4109 static int
4110 test_compressdev_deflate_im_buffers_SGL_over_2ops_second(void)
4111 {
4112 	struct comp_testsuite_params *ts_params = &testsuite_params;
4113 	uint16_t i = 0;
4114 	int ret = TEST_SUCCESS;
4115 	int j;
4116 	const struct rte_compressdev_capabilities *capab;
4117 	char *test_buffer = NULL;
4118 	const char *test_buffers[2];
4119 
4120 	RTE_LOG(INFO, USER1, "This is a negative test, errors are expected\n");
4121 
4122 	capab = rte_compressdev_capability_get(0, RTE_COMP_ALGO_DEFLATE);
4123 	TEST_ASSERT(capab != NULL, "Failed to retrieve device capabilities");
4124 
4125 	if ((capab->comp_feature_flags & RTE_COMP_FF_HUFFMAN_DYNAMIC) == 0)
4126 		return -ENOTSUP;
4127 
4128 	if ((capab->comp_feature_flags & RTE_COMP_FF_OOP_SGL_IN_SGL_OUT) == 0)
4129 		return -ENOTSUP;
4130 
4131 	test_buffer = rte_malloc(NULL, IM_BUF_DATA_TEST_SIZE_OVER, 0);
4132 	if (test_buffer == NULL) {
4133 		RTE_LOG(ERR, USER1,
4134 			"Can't allocate buffer for big-data\n");
4135 		return TEST_FAILED;
4136 	}
4137 
4138 	test_buffers[0] = compress_test_bufs[0];
4139 	test_buffers[1] = test_buffer;
4140 
4141 	struct interim_data_params int_data = {
4142 		(const char * const *)test_buffers,
4143 		2,
4144 		&i,
4145 		&ts_params->def_comp_xform,
4146 		&ts_params->def_decomp_xform,
4147 		1
4148 	};
4149 
4150 	struct test_data_params test_data = {
4151 		.compress_state = RTE_COMP_OP_STATELESS,
4152 		.decompress_state = RTE_COMP_OP_STATELESS,
4153 		.buff_type = SGL_BOTH,
4154 		.zlib_dir = ZLIB_NONE,
4155 		.out_of_space = 0,
4156 		.big_data = 1,
4157 		.overflow = OVERFLOW_DISABLED,
4158 		.ratio = RATIO_DISABLED
4159 	};
4160 
4161 	ts_params->def_comp_xform->compress.deflate.huffman =
4162 			RTE_COMP_HUFFMAN_DYNAMIC;
4163 
4164 	/* fill the buffer with data based on rand. data */
4165 	srand(IM_BUF_DATA_TEST_SIZE_OVER);
4166 	for (j = 0; j < IM_BUF_DATA_TEST_SIZE_OVER - 1; ++j)
4167 		test_buffer[j] = (uint8_t)(rand() % ((uint8_t)-1)) | 1;
4168 
4169 	/* Compress with compressdev, decompress with compressdev */
4170 	if (test_deflate_comp_decomp(&int_data, &test_data) < 0) {
4171 		ret = TEST_SUCCESS;
4172 		goto end;
4173 	}
4174 
4175 end:
4176 	ts_params->def_comp_xform->compress.deflate.huffman =
4177 			RTE_COMP_HUFFMAN_DEFAULT;
4178 	rte_free(test_buffer);
4179 	return ret;
4180 }
4181 
/* Master test-case table consumed by unit_test_suite_runner(). Most cases
 * share the generic per-test setup/teardown; the first case runs with none
 * because it exercises invalid device configuration before setup is valid.
 */
static struct unit_test_suite compressdev_testsuite  = {
	.suite_name = "compressdev unit test suite",
	.setup = testsuite_setup,
	.teardown = testsuite_teardown,
	.unit_test_cases = {
		TEST_CASE_ST(NULL, NULL,
			test_compressdev_invalid_configuration),
		/* Generic stateless/stateful deflate tests */
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_fixed),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_dynamic),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_dynamic_big),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_multi_op),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_multi_level),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_multi_xform),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_sgl),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateless_checksum),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_out_of_space_buffer),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateful_decomp),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_stateful_decomp_checksum),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_external_mbufs),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
		      test_compressdev_deflate_stateless_fixed_oos_recoverable),

		/* Positive test cases for IM buffer handling verification */
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_1op),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_2ops_first),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_2ops_second),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_3ops),

		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_LB_4ops),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_1op),

		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_2ops_first),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_2ops_second),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_3ops),
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_4ops),

		/* Negative test cases for IM buffer handling verification */

		/* For this test huge mempool is necessary.
		 * It tests one case:
		 * only one op containing big amount of data, so that
		 * number of requested descriptors higher than number
		 * of available descriptors (128)
		 */
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
			test_compressdev_deflate_im_buffers_SGL_over_1op),

		/* For this test huge mempool is necessary.
		 * 2 ops. First op contains big amount of data:
		 * number of requested descriptors higher than number
		 * of available descriptors (128), the second op is
		 * relatively small. In this case both ops are rejected
		 */
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
		       test_compressdev_deflate_im_buffers_SGL_over_2ops_first),

		/* Same as above but with the oversized op second */
		TEST_CASE_ST(generic_ut_setup, generic_ut_teardown,
		      test_compressdev_deflate_im_buffers_SGL_over_2ops_second),

		TEST_CASES_END() /**< NULL terminate unit test array */
	}
};
4266 
/* Entry point invoked by the test framework: runs the whole suite above */
static int
test_compressdev(void)
{
	return unit_test_suite_runner(&compressdev_testsuite);
}

/* Expose the suite as the "compressdev_autotest" command */
REGISTER_TEST_COMMAND(compressdev_autotest, test_compressdev);
4274