/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#include <rte_eal_memconfig.h>
#include <rte_string_fns.h>
#include <rte_acl.h>
#include <rte_tailq.h>
#include <rte_vect.h>

#include "acl.h"

TAILQ_HEAD(rte_acl_list, rte_tailq_entry);

static struct rte_tailq_elem rte_acl_tailq = {
	.name = "RTE_ACL",
};
EAL_REGISTER_TAILQ(rte_acl_tailq)

#ifndef CC_AVX512_SUPPORT
/*
 * If the compiler doesn't support AVX512 instructions,
 * then these dummy stubs are used instead for the AVX512 classify methods.
 */
int
rte_acl_classify_avx512x16(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}

int
rte_acl_classify_avx512x32(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

#ifndef CC_AVX2_SUPPORT
/*
 * If the compiler doesn't support AVX2 instructions,
 * then this dummy stub is used instead for the AVX2 classify method.
 */
int
rte_acl_classify_avx2(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_X86
int
rte_acl_classify_sse(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_ARM
int
rte_acl_classify_neon(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_PPC_64
int
rte_acl_classify_altivec(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

static const rte_acl_classify_t classify_fns[] = {
	[RTE_ACL_CLASSIFY_DEFAULT] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SCALAR] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SSE] = rte_acl_classify_sse,
	[RTE_ACL_CLASSIFY_AVX2] = rte_acl_classify_avx2,
	[RTE_ACL_CLASSIFY_NEON] = rte_acl_classify_neon,
	[RTE_ACL_CLASSIFY_ALTIVEC] = rte_acl_classify_altivec,
	[RTE_ACL_CLASSIFY_AVX512X16] = rte_acl_classify_avx512x16,
	[RTE_ACL_CLASSIFY_AVX512X32] = rte_acl_classify_avx512x32,
};

/*
 * Helper function for acl_check_alg.
 * Check support for ARM-specific classify methods.
 */
static int
acl_check_alg_arm(enum rte_acl_classify_alg alg)
{
	if (alg == RTE_ACL_CLASSIFY_NEON) {
#if defined(RTE_ARCH_ARM64)
		if (rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#elif defined(RTE_ARCH_ARM)
		if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON) &&
				rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#endif
		return -ENOTSUP;
	}

	return -EINVAL;
}

/*
 * Helper function for acl_check_alg.
 * Check support for PPC-specific classify methods.
 */
static int
acl_check_alg_ppc(enum rte_acl_classify_alg alg)
{
	if (alg == RTE_ACL_CLASSIFY_ALTIVEC) {
#if defined(RTE_ARCH_PPC_64)
		if (rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#endif
		return -ENOTSUP;
	}

	return -EINVAL;
}

#ifdef CC_AVX512_SUPPORT
static int
acl_check_avx512_cpu_flags(void)
{
	return (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512F) &&
			rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512VL) &&
			rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512CD) &&
			rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512BW));
}
#endif

/*
 * Helper function for acl_check_alg.
 * Check support for x86-specific classify methods.
 */
static int
acl_check_alg_x86(enum rte_acl_classify_alg alg)
{
	if (alg == RTE_ACL_CLASSIFY_AVX512X32) {
#ifdef CC_AVX512_SUPPORT
		if (acl_check_avx512_cpu_flags() != 0 &&
			rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_512)
			return 0;
#endif
		return -ENOTSUP;
	}

	if (alg == RTE_ACL_CLASSIFY_AVX512X16) {
#ifdef CC_AVX512_SUPPORT
		if (acl_check_avx512_cpu_flags() != 0 &&
			rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_256)
			return 0;
#endif
		return -ENOTSUP;
	}

	if (alg == RTE_ACL_CLASSIFY_AVX2) {
#ifdef CC_AVX2_SUPPORT
		if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2) &&
				rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_256)
			return 0;
#endif
		return -ENOTSUP;
	}

	if (alg == RTE_ACL_CLASSIFY_SSE) {
#ifdef RTE_ARCH_X86
		if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1) &&
				rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#endif
		return -ENOTSUP;
	}

	return -EINVAL;
}

/*
 * Check if the input alg is supported by the given platform/binary.
 * Note that both conditions must be met:
 * - at build time the compiler supports the ISA used by the given method
 * - at run time the target CPU supports the necessary ISA.
 */
static int
acl_check_alg(enum rte_acl_classify_alg alg)
{
	switch (alg) {
	case RTE_ACL_CLASSIFY_NEON:
		return acl_check_alg_arm(alg);
	case RTE_ACL_CLASSIFY_ALTIVEC:
		return acl_check_alg_ppc(alg);
	case RTE_ACL_CLASSIFY_AVX512X32:
	case RTE_ACL_CLASSIFY_AVX512X16:
	case RTE_ACL_CLASSIFY_AVX2:
	case RTE_ACL_CLASSIFY_SSE:
		return acl_check_alg_x86(alg);
	/* scalar method is supported on all platforms */
	case RTE_ACL_CLASSIFY_SCALAR:
		return 0;
	default:
		return -EINVAL;
	}
}
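
/*
 * Illustrative sketch, not part of the library: walk the public classify
 * methods and report which ones this particular build/CPU combination
 * supports, using acl_check_alg() above. The function name is hypothetical
 * and the helper is unused by the library itself.
 */
static __rte_unused void
example_report_supported_algs(void)
{
	static const enum rte_acl_classify_alg algs[] = {
		RTE_ACL_CLASSIFY_SCALAR,
		RTE_ACL_CLASSIFY_SSE,
		RTE_ACL_CLASSIFY_AVX2,
		RTE_ACL_CLASSIFY_AVX512X16,
		RTE_ACL_CLASSIFY_AVX512X32,
		RTE_ACL_CLASSIFY_NEON,
		RTE_ACL_CLASSIFY_ALTIVEC,
	};
	uint32_t i;

	for (i = 0; i != RTE_DIM(algs); i++)
		printf("classify alg %d: %s\n", (int)algs[i],
			acl_check_alg(algs[i]) == 0 ? "supported" : "not supported");
}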

/*
 * Get the preferred alg for the given platform.
 */
static enum rte_acl_classify_alg
acl_get_best_alg(void)
{
	/*
	 * Array of supported methods for each platform.
	 * Note that the order is important - from most to least preferable.
	 */
	static const enum rte_acl_classify_alg alg[] = {
#if defined(RTE_ARCH_ARM)
		RTE_ACL_CLASSIFY_NEON,
#elif defined(RTE_ARCH_PPC_64)
		RTE_ACL_CLASSIFY_ALTIVEC,
#elif defined(RTE_ARCH_X86)
		RTE_ACL_CLASSIFY_AVX512X32,
		RTE_ACL_CLASSIFY_AVX512X16,
		RTE_ACL_CLASSIFY_AVX2,
		RTE_ACL_CLASSIFY_SSE,
#endif
		RTE_ACL_CLASSIFY_SCALAR,
	};

	uint32_t i;

	/* find the best possible alg */
	for (i = 0; i != RTE_DIM(alg) && acl_check_alg(alg[i]) != 0; i++)
		;

	/* we always have to find something suitable */
	RTE_VERIFY(i != RTE_DIM(alg));
	return alg[i];
}

int
rte_acl_set_ctx_classify(struct rte_acl_ctx *ctx, enum rte_acl_classify_alg alg)
{
	int32_t rc;

	/* formal parameters check */
	if (ctx == NULL || (uint32_t)alg >= RTE_DIM(classify_fns))
		return -EINVAL;

	/* user asked us to select the *best* one */
	if (alg == RTE_ACL_CLASSIFY_DEFAULT)
		alg = acl_get_best_alg();

	/* check that the given alg is supported */
	rc = acl_check_alg(alg);
	if (rc != 0)
		return rc;

	ctx->alg = alg;
	return 0;
}
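
/*
 * Illustrative sketch, not part of the library: an application that prefers
 * the AVX2 method but falls back to whatever the platform supports best.
 * The function name is hypothetical; "ctx" is assumed to come from
 * rte_acl_create().
 */
static __rte_unused int
example_force_avx2_or_fallback(struct rte_acl_ctx *ctx)
{
	/* try the explicitly requested method first */
	if (rte_acl_set_ctx_classify(ctx, RTE_ACL_CLASSIFY_AVX2) == 0)
		return 0;

	/* otherwise let the library pick the best supported method */
	return rte_acl_set_ctx_classify(ctx, RTE_ACL_CLASSIFY_DEFAULT);
}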

int
rte_acl_classify_alg(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories,
	enum rte_acl_classify_alg alg)
{
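	/*
	 * "categories" must be either 1 or a multiple of
	 * RTE_ACL_RESULTS_MULTIPLIER; "results" is expected to hold
	 * num * categories entries.
	 */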
	if (categories != 1 &&
			((RTE_ACL_RESULTS_MULTIPLIER - 1) & categories) != 0)
		return -EINVAL;

	return classify_fns[alg](ctx, data, results, num, categories);
}

int
rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories)
{
	return rte_acl_classify_alg(ctx, data, results, num, categories,
		ctx->alg);
}
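
/*
 * Illustrative sketch, not part of the library: classify a burst of flat
 * input buffers against an already built context, one category per lookup.
 * Function and parameter names are hypothetical; each buffer is expected to
 * follow the layout described by the rte_acl_config used at build time.
 */
static __rte_unused int
example_classify_burst(const struct rte_acl_ctx *ctx,
	const uint8_t **bufs, uint32_t num_bufs, uint32_t *results)
{
	/*
	 * With one category, "results" must hold num_bufs entries;
	 * a result of 0 means no rule matched (userdata 0 is reserved).
	 */
	return rte_acl_classify(ctx, bufs, results, num_bufs, 1);
}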

struct rte_acl_ctx *
rte_acl_find_existing(const char *name)
{
	struct rte_acl_ctx *ctx = NULL;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_mcfg_tailq_read_lock();
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}
	rte_mcfg_tailq_read_unlock();

	if (te == NULL) {
		rte_errno = ENOENT;
		return NULL;
	}
	return ctx;
}
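
/*
 * Illustrative sketch, not part of the library: attach to an ACL context
 * created elsewhere (e.g. by the primary process), looked up by the name
 * that was originally passed in rte_acl_param. The function name is
 * hypothetical.
 */
static __rte_unused struct rte_acl_ctx *
example_lookup_ctx(const char *name)
{
	struct rte_acl_ctx *ctx = rte_acl_find_existing(name);

	if (ctx == NULL)
		printf("ACL context \"%s\" not found (rte_errno=%d)\n",
			name, rte_errno);
	return ctx;
}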

void
rte_acl_free(struct rte_acl_ctx *ctx)
{
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	if (ctx == NULL)
		return;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_mcfg_tailq_write_lock();

	/* find our tailq entry */
	TAILQ_FOREACH(te, acl_list, next) {
		if (te->data == (void *) ctx)
			break;
	}
	if (te == NULL) {
		rte_mcfg_tailq_write_unlock();
		return;
	}

	TAILQ_REMOVE(acl_list, te, next);

	rte_mcfg_tailq_write_unlock();

	rte_free(ctx->mem);
	rte_free(ctx);
	rte_free(te);
}

struct rte_acl_ctx *
rte_acl_create(const struct rte_acl_param *param)
{
	size_t sz;
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;
	char name[sizeof(ctx->name)];

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	/* check that input parameters are valid. */
	if (param == NULL || param->name == NULL) {
		rte_errno = EINVAL;
		return NULL;
	}

	snprintf(name, sizeof(name), "ACL_%s", param->name);

	/* calculate the amount of memory required for the pattern set. */
	sz = sizeof(*ctx) + param->max_rule_num * param->rule_size;

	/* get the EAL TAILQ lock. */
	rte_mcfg_tailq_write_lock();

	/* if we already have one with that name */
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(param->name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}

	/* if an ACL with that name doesn't exist, then create a new one. */
	if (te == NULL) {
		ctx = NULL;
		te = rte_zmalloc("ACL_TAILQ_ENTRY", sizeof(*te), 0);

		if (te == NULL) {
			RTE_LOG(ERR, ACL, "Cannot allocate tailq entry!\n");
			goto exit;
		}

		ctx = rte_zmalloc_socket(name, sz, RTE_CACHE_LINE_SIZE, param->socket_id);

		if (ctx == NULL) {
			RTE_LOG(ERR, ACL,
				"allocation of %zu bytes on socket %d for %s failed\n",
				sz, param->socket_id, name);
			rte_free(te);
			goto exit;
		}
		/* initialize the newly allocated context. */
		ctx->rules = ctx + 1;
		ctx->max_rules = param->max_rule_num;
		ctx->rule_sz = param->rule_size;
		ctx->socket_id = param->socket_id;
		ctx->alg = acl_get_best_alg();
		strlcpy(ctx->name, param->name, sizeof(ctx->name));

		te->data = (void *) ctx;

		TAILQ_INSERT_TAIL(acl_list, te, next);
	}

exit:
	rte_mcfg_tailq_write_unlock();
	return ctx;
}
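
/*
 * Illustrative sketch, not part of the library: fill an rte_acl_param and
 * create a context able to hold up to "max_rules" rules with five match
 * fields each. The context and function names are hypothetical;
 * RTE_ACL_RULE_SZ() sizes a rule with the given number of fields.
 */
static __rte_unused struct rte_acl_ctx *
example_create_ctx(int socket_id, uint32_t max_rules)
{
	struct rte_acl_param prm = {
		.name = "example_acl",
		.socket_id = socket_id,
		/* room for rules carrying 5 match fields each */
		.rule_size = RTE_ACL_RULE_SZ(5),
		.max_rule_num = max_rules,
	};

	/* returns NULL and sets rte_errno on failure */
	return rte_acl_create(&prm);
}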

static int
acl_add_rules(struct rte_acl_ctx *ctx, const void *rules, uint32_t num)
{
	uint8_t *pos;

	if (num + ctx->num_rules > ctx->max_rules)
		return -ENOMEM;

	pos = ctx->rules;
	pos += ctx->rule_sz * ctx->num_rules;
	memcpy(pos, rules, num * ctx->rule_sz);
	ctx->num_rules += num;

	return 0;
}

static int
acl_check_rule(const struct rte_acl_rule_data *rd)
{
	if ((RTE_LEN2MASK(RTE_ACL_MAX_CATEGORIES, typeof(rd->category_mask)) &
			rd->category_mask) == 0 ||
			rd->priority > RTE_ACL_MAX_PRIORITY ||
			rd->priority < RTE_ACL_MIN_PRIORITY)
		return -EINVAL;
	return 0;
}

int
rte_acl_add_rules(struct rte_acl_ctx *ctx, const struct rte_acl_rule *rules,
	uint32_t num)
{
	const struct rte_acl_rule *rv;
	uint32_t i;
	int32_t rc;

	if (ctx == NULL || rules == NULL || 0 == ctx->rule_sz)
		return -EINVAL;

	for (i = 0; i != num; i++) {
		rv = (const struct rte_acl_rule *)
			((uintptr_t)rules + i * ctx->rule_sz);
		rc = acl_check_rule(&rv->data);
		if (rc != 0) {
			RTE_LOG(ERR, ACL, "%s(%s): rule #%u is invalid\n",
				__func__, ctx->name, i + 1);
			return rc;
		}
	}

	return acl_add_rules(ctx, rules, num);
}
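
/*
 * Illustrative sketch, not part of the library: define a minimal rule type
 * with a single match field and add one low-priority catch-all rule.
 * The type and function names are hypothetical; the field layout must match
 * the rte_acl_config that is later passed to rte_acl_build().
 */
RTE_ACL_RULE_DEF(example_acl_rule, 1);

static __rte_unused int
example_add_catch_all_rule(struct rte_acl_ctx *ctx)
{
	struct example_acl_rule r = {
		.data = {
			/* report a match in category 0 only */
			.category_mask = 1,
			.priority = RTE_ACL_MIN_PRIORITY,
			/* non-zero userdata returned by classify on match */
			.userdata = 1,
		},
		/* zero value/mask on a bitmask-type byte field acts as a wildcard */
		.field[0] = {
			.value.u8 = 0,
			.mask_range.u8 = 0,
		},
	};

	return rte_acl_add_rules(ctx, (const struct rte_acl_rule *)&r, 1);
}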

/*
 * Reset all rules.
 * Note that RT structures are not affected.
 */
void
rte_acl_reset_rules(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL)
		ctx->num_rules = 0;
}

/*
 * Reset all rules and destroy RT structures.
 */
void
rte_acl_reset(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL) {
		rte_acl_reset_rules(ctx);
		rte_acl_build(ctx, &ctx->config);
	}
}
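
/*
 * Illustrative sketch, not part of the library: replace the whole rule set
 * of a context and regenerate its runtime structures. "cfg" is assumed to
 * be the rte_acl_config used for the previous build; names are hypothetical.
 */
static __rte_unused int
example_replace_rules(struct rte_acl_ctx *ctx,
	const struct rte_acl_rule *rules, uint32_t num,
	const struct rte_acl_config *cfg)
{
	int rc;

	/* drop the existing rules, keeping the current runtime structures */
	rte_acl_reset_rules(ctx);

	rc = rte_acl_add_rules(ctx, rules, num);
	if (rc != 0)
		return rc;

	/* build new runtime structures from the new rule set */
	return rte_acl_build(ctx, cfg);
}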

/*
 * Dump an ACL context to stdout.
 */
void
rte_acl_dump(const struct rte_acl_ctx *ctx)
{
	if (!ctx)
		return;
	printf("acl context <%s>@%p\n", ctx->name, ctx);
	printf("  socket_id=%"PRId32"\n", ctx->socket_id);
	printf("  alg=%"PRId32"\n", ctx->alg);
	printf("  first_load_sz=%"PRIu32"\n", ctx->first_load_sz);
	printf("  max_rules=%"PRIu32"\n", ctx->max_rules);
	printf("  rule_size=%"PRIu32"\n", ctx->rule_sz);
	printf("  num_rules=%"PRIu32"\n", ctx->num_rules);
	printf("  num_categories=%"PRIu32"\n", ctx->num_categories);
	printf("  num_tries=%"PRIu32"\n", ctx->num_tries);
}

/*
 * Dump all ACL contexts to stdout.
 */
void
rte_acl_list_dump(void)
{
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_mcfg_tailq_read_lock();
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		rte_acl_dump(ctx);
	}
	rte_mcfg_tailq_read_unlock();
}