1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2017-2018 Intel Corporation
3 */
4
5 #include "rte_comp.h"
6 #include "rte_compressdev_internal.h"
7
8 const char *
rte_comp_get_feature_name(uint64_t flag)9 rte_comp_get_feature_name(uint64_t flag)
10 {
11 switch (flag) {
12 case RTE_COMP_FF_STATEFUL_COMPRESSION:
13 return "STATEFUL_COMPRESSION";
14 case RTE_COMP_FF_STATEFUL_DECOMPRESSION:
15 return "STATEFUL_DECOMPRESSION";
16 case RTE_COMP_FF_OOP_SGL_IN_SGL_OUT:
17 return "OOP_SGL_IN_SGL_OUT";
18 case RTE_COMP_FF_OOP_SGL_IN_LB_OUT:
19 return "OOP_SGL_IN_LB_OUT";
20 case RTE_COMP_FF_OOP_LB_IN_SGL_OUT:
21 return "OOP_LB_IN_SGL_OUT";
22 case RTE_COMP_FF_MULTI_PKT_CHECKSUM:
23 return "MULTI_PKT_CHECKSUM";
24 case RTE_COMP_FF_ADLER32_CHECKSUM:
25 return "ADLER32_CHECKSUM";
26 case RTE_COMP_FF_CRC32_CHECKSUM:
27 return "CRC32_CHECKSUM";
28 case RTE_COMP_FF_CRC32_ADLER32_CHECKSUM:
29 return "CRC32_ADLER32_CHECKSUM";
30 case RTE_COMP_FF_NONCOMPRESSED_BLOCKS:
31 return "NONCOMPRESSED_BLOCKS";
32 case RTE_COMP_FF_SHA1_HASH:
33 return "SHA1_HASH";
34 case RTE_COMP_FF_SHA2_SHA256_HASH:
35 return "SHA2_SHA256_HASH";
36 case RTE_COMP_FF_SHAREABLE_PRIV_XFORM:
37 return "SHAREABLE_PRIV_XFORM";
38 case RTE_COMP_FF_HUFFMAN_FIXED:
39 return "HUFFMAN_FIXED";
40 case RTE_COMP_FF_HUFFMAN_DYNAMIC:
41 return "HUFFMAN_DYNAMIC";
42 case RTE_COMP_FF_XXHASH32_CHECKSUM:
43 return "XXHASH32_CHECKSUM";
44 case RTE_COMP_FF_LZ4_DICT_ID:
45 return "LZ4_DICT_ID";
46 case RTE_COMP_FF_LZ4_CONTENT_WITH_CHECKSUM:
47 return "LZ4_CONTENT_WITH_CHECKSUM";
48 case RTE_COMP_FF_LZ4_CONTENT_SIZE:
49 return "LZ4_CONTENT_SIZE";
50 case RTE_COMP_FF_LZ4_BLOCK_INDEPENDENCE:
51 return "LZ4_BLOCK_INDEPENDENCE";
52 case RTE_COMP_FF_LZ4_BLOCK_WITH_CHECKSUM:
53 return "LZ4_BLOCK_WITH_CHECKSUM";
54 default:
55 return NULL;
56 }
57 }
58
59 /**
60 * Reset the fields of an operation to their default values.
61 *
62 * @note The private data associated with the operation is not zeroed.
63 *
64 * @param op
65 * The operation to be reset
66 */
67 static inline void
rte_comp_op_reset(struct rte_comp_op * op)68 rte_comp_op_reset(struct rte_comp_op *op)
69 {
70 struct rte_mempool *tmp_mp = op->mempool;
71 rte_iova_t tmp_iova_addr = op->iova_addr;
72
73 memset(op, 0, sizeof(struct rte_comp_op));
74 op->status = RTE_COMP_OP_STATUS_NOT_PROCESSED;
75 op->iova_addr = tmp_iova_addr;
76 op->mempool = tmp_mp;
77 }
78
/**
 * Private data structure belonging to an operation pool.
 *
 * Stored in the mempool's private area at creation time and read back
 * on lookup to check that an existing pool is compatible with the
 * parameters being requested.
 */
struct rte_comp_op_pool_private {
	uint16_t user_size;
	/**< Size of private user data with each operation. */
};
86
/**
 * Bulk allocate raw elements from mempool and return as comp operations
 *
 * @param mempool
 *   Compress operation mempool
 * @param ops
 *   Array to place allocated operations
 * @param nb_ops
 *   Number of operations to allocate
 * @return
 *   - nb_ops: Success, the nb_ops requested was allocated
 *   - 0: Not enough entries in the mempool; no ops are retrieved.
 */
static inline int
rte_comp_op_raw_bulk_alloc(struct rte_mempool *mempool,
		struct rte_comp_op **ops, uint16_t nb_ops)
{
	/* rte_mempool_get_bulk() is all-or-nothing: 0 on success. */
	int ret = rte_mempool_get_bulk(mempool, (void **)ops, nb_ops);

	return (ret == 0) ? nb_ops : 0;
}
109
110 /** Initialise rte_comp_op mempool element */
111 static void
rte_comp_op_init(struct rte_mempool * mempool,__rte_unused void * opaque_arg,void * _op_data,__rte_unused unsigned int i)112 rte_comp_op_init(struct rte_mempool *mempool,
113 __rte_unused void *opaque_arg,
114 void *_op_data,
115 __rte_unused unsigned int i)
116 {
117 struct rte_comp_op *op = _op_data;
118
119 memset(_op_data, 0, mempool->elt_size);
120
121 op->status = RTE_COMP_OP_STATUS_NOT_PROCESSED;
122 op->iova_addr = rte_mem_virt2iova(_op_data);
123 op->mempool = mempool;
124 }
125
126 struct rte_mempool *
rte_comp_op_pool_create(const char * name,unsigned int nb_elts,unsigned int cache_size,uint16_t user_size,int socket_id)127 rte_comp_op_pool_create(const char *name,
128 unsigned int nb_elts, unsigned int cache_size,
129 uint16_t user_size, int socket_id)
130 {
131 struct rte_comp_op_pool_private *priv;
132
133 unsigned int elt_size = sizeof(struct rte_comp_op) + user_size;
134
135 /* lookup mempool in case already allocated */
136 struct rte_mempool *mp = rte_mempool_lookup(name);
137
138 if (mp != NULL) {
139 priv = (struct rte_comp_op_pool_private *)
140 rte_mempool_get_priv(mp);
141
142 if (mp->elt_size != elt_size ||
143 mp->cache_size < cache_size ||
144 mp->size < nb_elts ||
145 priv->user_size < user_size) {
146 mp = NULL;
147 COMPRESSDEV_LOG(ERR,
148 "Mempool %s already exists but with incompatible parameters",
149 name);
150 return NULL;
151 }
152 return mp;
153 }
154
155 mp = rte_mempool_create(
156 name,
157 nb_elts,
158 elt_size,
159 cache_size,
160 sizeof(struct rte_comp_op_pool_private),
161 NULL,
162 NULL,
163 rte_comp_op_init,
164 NULL,
165 socket_id,
166 0);
167
168 if (mp == NULL) {
169 COMPRESSDEV_LOG(ERR, "Failed to create mempool %s", name);
170 return NULL;
171 }
172
173 priv = (struct rte_comp_op_pool_private *)
174 rte_mempool_get_priv(mp);
175
176 priv->user_size = user_size;
177
178 return mp;
179 }
180
181 struct rte_comp_op *
rte_comp_op_alloc(struct rte_mempool * mempool)182 rte_comp_op_alloc(struct rte_mempool *mempool)
183 {
184 struct rte_comp_op *op = NULL;
185 int retval;
186
187 retval = rte_comp_op_raw_bulk_alloc(mempool, &op, 1);
188 if (unlikely(retval != 1))
189 return NULL;
190
191 rte_comp_op_reset(op);
192
193 return op;
194 }
195
/* Allocate nb_ops operations and reset each; all-or-nothing. */
int
rte_comp_op_bulk_alloc(struct rte_mempool *mempool,
		struct rte_comp_op **ops, uint16_t nb_ops)
{
	uint16_t idx;

	if (unlikely(rte_comp_op_raw_bulk_alloc(mempool, ops, nb_ops) !=
			nb_ops))
		return 0;

	for (idx = 0; idx < nb_ops; idx++)
		rte_comp_op_reset(ops[idx]);

	return nb_ops;
}
212
213 /**
214 * free operation structure
215 * If operation has been allocate from a rte_mempool, then the operation will
216 * be returned to the mempool.
217 *
218 * @param op
219 * Compress operation
220 */
221 void
rte_comp_op_free(struct rte_comp_op * op)222 rte_comp_op_free(struct rte_comp_op *op)
223 {
224 if (op != NULL && op->mempool != NULL)
225 rte_mempool_put(op->mempool, op);
226 }
227
228 void
rte_comp_op_bulk_free(struct rte_comp_op ** ops,uint16_t nb_ops)229 rte_comp_op_bulk_free(struct rte_comp_op **ops, uint16_t nb_ops)
230 {
231 uint16_t i;
232
233 for (i = 0; i < nb_ops; i++) {
234 if (ops[i] != NULL && ops[i]->mempool != NULL)
235 rte_mempool_put(ops[i]->mempool, ops[i]);
236 ops[i] = NULL;
237 }
238 }
239