/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2017 Cavium, Inc
 */

#include <string.h>

#include <rte_common.h>
#include <rte_malloc.h>
#include <cryptodev_pmd.h>

#include "armv8_pmd_private.h"

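/*
 * Capabilities advertised to the cryptodev framework: HMAC-SHA1 and
 * HMAC-SHA256 authentication, and AES-CBC ciphering with 128-bit keys.
 */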
static const struct rte_cryptodev_capabilities
	armv8_crypto_pmd_capabilities[] = {
	{	/* SHA1 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA1_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 20,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* SHA256 HMAC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_AUTH,
			{.auth = {
				.algo = RTE_CRYPTO_AUTH_SHA256_HMAC,
				.block_size = 64,
				.key_size = {
					.min = 1,
					.max = 64,
					.increment = 1
				},
				.digest_size = {
					.min = 1,
					.max = 32,
					.increment = 1
				},
				.iv_size = { 0 }
			}, }
		}, }
	},
	{	/* AES CBC */
		.op = RTE_CRYPTO_OP_TYPE_SYMMETRIC,
		{.sym = {
			.xform_type = RTE_CRYPTO_SYM_XFORM_CIPHER,
			{.cipher = {
				.algo = RTE_CRYPTO_CIPHER_AES_CBC,
				.block_size = 16,
				.key_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				},
				.iv_size = {
					.min = 16,
					.max = 16,
					.increment = 0
				}
			}, }
		}, }
	},

	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};


/** Configure device */
static int
armv8_crypto_pmd_config(__rte_unused struct rte_cryptodev *dev,
		__rte_unused struct rte_cryptodev_config *config)
{
	return 0;
}

/** Start device */
static int
armv8_crypto_pmd_start(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}

/** Stop device */
static void
armv8_crypto_pmd_stop(__rte_unused struct rte_cryptodev *dev)
{
}

/** Close device */
static int
armv8_crypto_pmd_close(__rte_unused struct rte_cryptodev *dev)
{
	return 0;
}


/** Get device statistics */
static void
armv8_crypto_pmd_stats_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_stats *stats)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

		stats->enqueued_count += qp->stats.enqueued_count;
		stats->dequeued_count += qp->stats.dequeued_count;

		stats->enqueue_err_count += qp->stats.enqueue_err_count;
		stats->dequeue_err_count += qp->stats.dequeue_err_count;
	}
}

/** Reset device statistics */
static void
armv8_crypto_pmd_stats_reset(struct rte_cryptodev *dev)
{
	int qp_id;

	for (qp_id = 0; qp_id < dev->data->nb_queue_pairs; qp_id++) {
		struct armv8_crypto_qp *qp = dev->data->queue_pairs[qp_id];

		memset(&qp->stats, 0, sizeof(qp->stats));
	}
}


/** Get device info */
static void
armv8_crypto_pmd_info_get(struct rte_cryptodev *dev,
		struct rte_cryptodev_info *dev_info)
{
	struct armv8_crypto_private *internals = dev->data->dev_private;

	if (dev_info != NULL) {
		dev_info->driver_id = dev->driver_id;
		dev_info->feature_flags = dev->feature_flags;
		dev_info->capabilities = armv8_crypto_pmd_capabilities;
		dev_info->max_nb_queue_pairs = internals->max_nb_qpairs;
		/* No limit on the number of sessions */
		dev_info->sym.max_nb_sessions = 0;
	}
}

/** Release queue pair */
static int
armv8_crypto_pmd_qp_release(struct rte_cryptodev *dev, uint16_t qp_id)
{
	if (dev->data->queue_pairs[qp_id] != NULL) {
		rte_free(dev->data->queue_pairs[qp_id]);
		dev->data->queue_pairs[qp_id] = NULL;
	}

	return 0;
}

/** Set a unique name for the queue pair based on its dev_id and qp_id */
static int
armv8_crypto_pmd_qp_set_unique_name(struct rte_cryptodev *dev,
		struct armv8_crypto_qp *qp)
{
	unsigned int n;

	n = snprintf(qp->name, sizeof(qp->name), "armv8_crypto_pmd_%u_qp_%u",
			dev->data->dev_id, qp->id);

	if (n >= sizeof(qp->name))
		return -1;

	return 0;
}


/** Create a ring to place processed operations on */
static struct rte_ring *
armv8_crypto_pmd_qp_create_processed_ops_ring(struct armv8_crypto_qp *qp,
		unsigned int ring_size, int socket_id)
{
	struct rte_ring *r;

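	/*
	 * A ring with this name may already exist (e.g. from an earlier
	 * setup of this queue pair); reuse it if it is large enough.
	 */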
	r = rte_ring_lookup(qp->name);
	if (r) {
		if (rte_ring_get_size(r) >= ring_size) {
			ARMV8_CRYPTO_LOG_INFO(
				"Reusing existing ring %s for processed ops",
				qp->name);
			return r;
		}

		ARMV8_CRYPTO_LOG_ERR(
			"Unable to reuse existing ring %s for processed ops",
			qp->name);
		return NULL;
	}

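	/* No usable ring found: create a single-producer/single-consumer ring. */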
	return rte_ring_create(qp->name, ring_size, socket_id,
			RING_F_SP_ENQ | RING_F_SC_DEQ);
}


/** Setup a queue pair */
static int
armv8_crypto_pmd_qp_setup(struct rte_cryptodev *dev, uint16_t qp_id,
		const struct rte_cryptodev_qp_conf *qp_conf,
		int socket_id)
{
	struct armv8_crypto_qp *qp = NULL;

	/* Free memory prior to re-allocation if needed. */
	if (dev->data->queue_pairs[qp_id] != NULL)
		armv8_crypto_pmd_qp_release(dev, qp_id);

	/* Allocate the queue pair data structure. */
	qp = rte_zmalloc_socket("ARMv8 PMD Queue Pair", sizeof(*qp),
			RTE_CACHE_LINE_SIZE, socket_id);
	if (qp == NULL)
		return -ENOMEM;

	qp->id = qp_id;
	dev->data->queue_pairs[qp_id] = qp;

	if (armv8_crypto_pmd_qp_set_unique_name(dev, qp) != 0)
		goto qp_setup_cleanup;

	qp->processed_ops = armv8_crypto_pmd_qp_create_processed_ops_ring(qp,
			qp_conf->nb_descriptors, socket_id);
	if (qp->processed_ops == NULL)
		goto qp_setup_cleanup;

	qp->sess_mp = qp_conf->mp_session;

	memset(&qp->stats, 0, sizeof(qp->stats));

	return 0;

qp_setup_cleanup:
	/* Drop the stale pointer stored in dev data before freeing the qp. */
	dev->data->queue_pairs[qp_id] = NULL;
	rte_free(qp);

	return -1;
}

/** Returns the size of the session structure */
static unsigned
armv8_crypto_pmd_sym_session_get_size(struct rte_cryptodev *dev __rte_unused)
{
	return sizeof(struct armv8_crypto_session);
}

/** Configure the session from a crypto xform chain */
static int
armv8_crypto_pmd_sym_session_configure(struct rte_cryptodev *dev __rte_unused,
		struct rte_crypto_sym_xform *xform,
		struct rte_cryptodev_sym_session *sess)
{
	void *sess_private_data;
	int ret;

	if (unlikely(sess == NULL)) {
		ARMV8_CRYPTO_LOG_ERR("invalid session struct");
		return -EINVAL;
	}

	sess_private_data = sess->driver_priv_data;

	ret = armv8_crypto_set_session_parameters(sess_private_data, xform);
	if (ret != 0) {
		ARMV8_CRYPTO_LOG_ERR("failed to configure session parameters");
		return ret;
	}

	return 0;
}

/** Clear the memory of session so it doesn't leave key material behind */
static void
armv8_crypto_pmd_sym_session_clear(struct rte_cryptodev *dev __rte_unused,
		struct rte_cryptodev_sym_session *sess __rte_unused)
{}

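/*
 * Ops table exported to the cryptodev framework; control-path calls
 * (configure, start/stop, queue pair and session setup) are dispatched
 * to these callbacks.
 */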
struct rte_cryptodev_ops armv8_crypto_pmd_ops = {
	.dev_configure		= armv8_crypto_pmd_config,
	.dev_start		= armv8_crypto_pmd_start,
	.dev_stop		= armv8_crypto_pmd_stop,
	.dev_close		= armv8_crypto_pmd_close,

	.stats_get		= armv8_crypto_pmd_stats_get,
	.stats_reset		= armv8_crypto_pmd_stats_reset,

	.dev_infos_get		= armv8_crypto_pmd_info_get,

	.queue_pair_setup	= armv8_crypto_pmd_qp_setup,
	.queue_pair_release	= armv8_crypto_pmd_qp_release,

	.sym_session_get_size	= armv8_crypto_pmd_sym_session_get_size,
	.sym_session_configure	= armv8_crypto_pmd_sym_session_configure,
	.sym_session_clear	= armv8_crypto_pmd_sym_session_clear
};

struct rte_cryptodev_ops *rte_armv8_crypto_pmd_ops = &armv8_crypto_pmd_ops;