/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#include <rte_eal_memconfig.h>
#include <rte_string_fns.h>
#include <rte_acl.h>
#include <rte_tailq.h>

#include "acl.h"
#include "acl_log.h"

RTE_LOG_REGISTER_DEFAULT(acl_logtype, INFO);

TAILQ_HEAD(rte_acl_list, rte_tailq_entry);

static struct rte_tailq_elem rte_acl_tailq = {
	.name = "RTE_ACL",
};
EAL_REGISTER_TAILQ(rte_acl_tailq)

#ifndef CC_AVX512_SUPPORT
/*
 * If the compiler doesn't support AVX512 instructions,
 * then dummy implementations are used instead for the AVX512 classify methods.
 */
int
rte_acl_classify_avx512x16(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}

int
rte_acl_classify_avx512x32(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_X86
/*
 * If the ISA doesn't have AVX2 or SSE, provide dummy fallbacks.
 */
int
rte_acl_classify_avx2(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
int
rte_acl_classify_sse(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_ARM
int
rte_acl_classify_neon(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

#ifndef RTE_ARCH_PPC_64
int
rte_acl_classify_altivec(__rte_unused const struct rte_acl_ctx *ctx,
	__rte_unused const uint8_t **data,
	__rte_unused uint32_t *results,
	__rte_unused uint32_t num,
	__rte_unused uint32_t categories)
{
	return -ENOTSUP;
}
#endif

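/*
 * Dispatch table of classify implementations, indexed by
 * enum rte_acl_classify_alg.
 */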
static const rte_acl_classify_t classify_fns[] = {
	[RTE_ACL_CLASSIFY_DEFAULT] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SCALAR] = rte_acl_classify_scalar,
	[RTE_ACL_CLASSIFY_SSE] = rte_acl_classify_sse,
	[RTE_ACL_CLASSIFY_AVX2] = rte_acl_classify_avx2,
	[RTE_ACL_CLASSIFY_NEON] = rte_acl_classify_neon,
	[RTE_ACL_CLASSIFY_ALTIVEC] = rte_acl_classify_altivec,
	[RTE_ACL_CLASSIFY_AVX512X16] = rte_acl_classify_avx512x16,
	[RTE_ACL_CLASSIFY_AVX512X32] = rte_acl_classify_avx512x32,
};

/*
 * Helper function for acl_check_alg.
 * Check support for ARM specific classify methods.
 */
static int
acl_check_alg_arm(enum rte_acl_classify_alg alg)
{
	if (alg == RTE_ACL_CLASSIFY_NEON) {
#if defined(RTE_ARCH_ARM64)
		if (rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#elif defined(RTE_ARCH_ARM)
		if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_NEON) &&
				rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#endif
		return -ENOTSUP;
	}

	return -EINVAL;
}

/*
 * Helper function for acl_check_alg.
 * Check support for PPC specific classify methods.
 */
static int
acl_check_alg_ppc(enum rte_acl_classify_alg alg)
{
	if (alg == RTE_ACL_CLASSIFY_ALTIVEC) {
#if defined(RTE_ARCH_PPC_64)
		if (rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#endif
		return -ENOTSUP;
	}

	return -EINVAL;
}

#ifdef CC_AVX512_SUPPORT
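/*
 * Check that the CPU provides all AVX512 subsets required by the
 * AVX512 classify methods (F, VL, CD, BW).
 */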
static int
acl_check_avx512_cpu_flags(void)
{
	return (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512F) &&
			rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512VL) &&
			rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512CD) &&
			rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX512BW));
}
#endif

/*
 * Helper function for acl_check_alg.
 * Check support for x86 specific classify methods.
 */
static int
acl_check_alg_x86(enum rte_acl_classify_alg alg)
{
	if (alg == RTE_ACL_CLASSIFY_AVX512X32) {
#ifdef CC_AVX512_SUPPORT
		if (acl_check_avx512_cpu_flags() != 0 &&
			rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_512)
			return 0;
#endif
		return -ENOTSUP;
	}

	if (alg == RTE_ACL_CLASSIFY_AVX512X16) {
#ifdef CC_AVX512_SUPPORT
		if (acl_check_avx512_cpu_flags() != 0 &&
			rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_256)
			return 0;
#endif
		return -ENOTSUP;
	}

	if (alg == RTE_ACL_CLASSIFY_AVX2) {
#ifdef RTE_ARCH_X86
		if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2) &&
				rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_256)
			return 0;
#endif
		return -ENOTSUP;
	}

	if (alg == RTE_ACL_CLASSIFY_SSE) {
#ifdef RTE_ARCH_X86
		if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_SSE4_1) &&
				rte_vect_get_max_simd_bitwidth() >= RTE_VECT_SIMD_128)
			return 0;
#endif
		return -ENOTSUP;
	}

	return -EINVAL;
}

/*
 * Check if the input alg is supported by the given platform/binary.
 * Note that both conditions must be met:
 * - at build time the compiler supports the ISA used by the given method
 * - at run time the target CPU supports the necessary ISA.
 */
static int
acl_check_alg(enum rte_acl_classify_alg alg)
{
	switch (alg) {
	case RTE_ACL_CLASSIFY_NEON:
		return acl_check_alg_arm(alg);
	case RTE_ACL_CLASSIFY_ALTIVEC:
		return acl_check_alg_ppc(alg);
	case RTE_ACL_CLASSIFY_AVX512X32:
	case RTE_ACL_CLASSIFY_AVX512X16:
	case RTE_ACL_CLASSIFY_AVX2:
	case RTE_ACL_CLASSIFY_SSE:
		return acl_check_alg_x86(alg);
	/* scalar method is supported on all platforms */
	case RTE_ACL_CLASSIFY_SCALAR:
		return 0;
	default:
		return -EINVAL;
	}
}

/*
 * Get the preferred alg for the given platform.
 */
static enum rte_acl_classify_alg
acl_get_best_alg(void)
{
	/*
	 * Array of supported methods for each platform.
	 * Note that order is important: from most to least preferable.
	 */
	static const enum rte_acl_classify_alg alg[] = {
#if defined(RTE_ARCH_ARM)
		RTE_ACL_CLASSIFY_NEON,
#elif defined(RTE_ARCH_PPC_64)
		RTE_ACL_CLASSIFY_ALTIVEC,
#elif defined(RTE_ARCH_X86)
		RTE_ACL_CLASSIFY_AVX512X32,
		RTE_ACL_CLASSIFY_AVX512X16,
		RTE_ACL_CLASSIFY_AVX2,
		RTE_ACL_CLASSIFY_SSE,
#endif
		RTE_ACL_CLASSIFY_SCALAR,
	};

	uint32_t i;

	/* find best possible alg */
	for (i = 0; i != RTE_DIM(alg) && acl_check_alg(alg[i]) != 0; i++)
		;

	/* we always have to find something suitable */
	RTE_VERIFY(i != RTE_DIM(alg));
	return alg[i];
}

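/*
 * Override the classify method for the given context.
 * RTE_ACL_CLASSIFY_DEFAULT is resolved to the best method available on
 * the current platform; any other method is accepted only if it is
 * supported by both the binary and the running CPU.
 */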
extern int
rte_acl_set_ctx_classify(struct rte_acl_ctx *ctx, enum rte_acl_classify_alg alg)
{
	int32_t rc;

	/* formal parameters check */
	if (ctx == NULL || (uint32_t)alg >= RTE_DIM(classify_fns))
		return -EINVAL;

	/* user asked us to select the *best* one */
	if (alg == RTE_ACL_CLASSIFY_DEFAULT)
		alg = acl_get_best_alg();

	/* check that given alg is supported */
	rc = acl_check_alg(alg);
	if (rc != 0)
		return rc;

	ctx->alg = alg;
	return 0;
}

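/*
 * Classify a burst of input buffers with an explicitly chosen method.
 * The number of categories must be either 1 or a multiple of
 * RTE_ACL_RESULTS_MULTIPLIER.
 */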
int
rte_acl_classify_alg(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories,
	enum rte_acl_classify_alg alg)
{
	if (categories != 1 &&
			((RTE_ACL_RESULTS_MULTIPLIER - 1) & categories) != 0)
		return -EINVAL;

	return classify_fns[alg](ctx, data, results, num, categories);
}

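/*
 * Classify using the method currently configured for the context.
 */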
int
rte_acl_classify(const struct rte_acl_ctx *ctx, const uint8_t **data,
	uint32_t *results, uint32_t num, uint32_t categories)
{
	return rte_acl_classify_alg(ctx, data, results, num, categories,
		ctx->alg);
}

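/*
 * Find an existing ACL context by name.
 * Returns NULL and sets rte_errno to ENOENT if no such context exists.
 */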
struct rte_acl_ctx *
rte_acl_find_existing(const char *name)
{
	struct rte_acl_ctx *ctx = NULL;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_mcfg_tailq_read_lock();
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}
	rte_mcfg_tailq_read_unlock();

	if (te == NULL) {
		rte_errno = ENOENT;
		return NULL;
	}
	return ctx;
}

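/*
 * Remove the context from the global list and free all memory
 * associated with it. Safe to call with a NULL pointer.
 */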
void
rte_acl_free(struct rte_acl_ctx *ctx)
{
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	if (ctx == NULL)
		return;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_mcfg_tailq_write_lock();

	/* find our tailq entry */
	TAILQ_FOREACH(te, acl_list, next) {
		if (te->data == (void *) ctx)
			break;
	}
	if (te == NULL) {
		rte_mcfg_tailq_write_unlock();
		return;
	}

	TAILQ_REMOVE(acl_list, te, next);

	rte_mcfg_tailq_write_unlock();

	rte_free(ctx->mem);
	rte_free(ctx);
	rte_free(te);
}

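/*
 * Allocate a new ACL context, or return the existing one registered
 * under the same name. The context and its rule storage are allocated
 * in one block on the requested socket.
 *
 * Minimal usage sketch (field values are illustrative only):
 *	struct rte_acl_param prm = {
 *		.name = "example_acl",
 *		.socket_id = SOCKET_ID_ANY,
 *		.rule_size = RTE_ACL_RULE_SZ(5),
 *		.max_rule_num = 1024,
 *	};
 *	struct rte_acl_ctx *ctx = rte_acl_create(&prm);
 */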
struct rte_acl_ctx *
rte_acl_create(const struct rte_acl_param *param)
{
	size_t sz;
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;
	char name[sizeof(ctx->name)];

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	/* check that input parameters are valid. */
	if (param == NULL || param->name == NULL) {
		rte_errno = EINVAL;
		return NULL;
	}

	snprintf(name, sizeof(name), "ACL_%s", param->name);

	/* calculate amount of memory required for pattern set. */
	sz = sizeof(*ctx) + param->max_rule_num * param->rule_size;

	/* get EAL TAILQ lock. */
	rte_mcfg_tailq_write_lock();

	/* if we already have one with that name */
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		if (strncmp(param->name, ctx->name, sizeof(ctx->name)) == 0)
			break;
	}

	/* if an ACL context with that name doesn't exist, create a new one. */
	if (te == NULL) {
		ctx = NULL;
		te = rte_zmalloc("ACL_TAILQ_ENTRY", sizeof(*te), 0);

		if (te == NULL) {
			ACL_LOG(ERR, "Cannot allocate tailq entry!");
			goto exit;
		}

		ctx = rte_zmalloc_socket(name, sz, RTE_CACHE_LINE_SIZE, param->socket_id);

		if (ctx == NULL) {
			ACL_LOG(ERR,
				"allocation of %zu bytes on socket %d for %s failed",
				sz, param->socket_id, name);
			rte_free(te);
			goto exit;
		}
		/* init the newly allocated context. */
		ctx->rules = ctx + 1;
		ctx->max_rules = param->max_rule_num;
		ctx->rule_sz = param->rule_size;
		ctx->socket_id = param->socket_id;
		ctx->alg = acl_get_best_alg();
		strlcpy(ctx->name, param->name, sizeof(ctx->name));

		te->data = (void *) ctx;

		TAILQ_INSERT_TAIL(acl_list, te, next);
	}

exit:
	rte_mcfg_tailq_write_unlock();
	return ctx;
}

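/*
 * Copy 'num' rules to the end of the context's rule array.
 * Fails with -ENOMEM if the rules do not fit into the preallocated storage.
 */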
static int
acl_add_rules(struct rte_acl_ctx *ctx, const void *rules, uint32_t num)
{
	uint8_t *pos;

	if (num + ctx->num_rules > ctx->max_rules)
		return -ENOMEM;

	pos = ctx->rules;
	pos += ctx->rule_sz * ctx->num_rules;
	memcpy(pos, rules, num * ctx->rule_sz);
	ctx->num_rules += num;

	return 0;
}

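/*
 * Sanity check for rule data: the category mask must select at least one
 * valid category and the priority must stay within
 * [RTE_ACL_MIN_PRIORITY, RTE_ACL_MAX_PRIORITY].
 */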
static int
acl_check_rule(const struct rte_acl_rule_data *rd)
{
	if ((RTE_LEN2MASK(RTE_ACL_MAX_CATEGORIES, typeof(rd->category_mask)) &
			rd->category_mask) == 0 ||
			rd->priority > RTE_ACL_MAX_PRIORITY ||
			rd->priority < RTE_ACL_MIN_PRIORITY)
		return -EINVAL;
	return 0;
}

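/*
 * Validate each rule in the input array and, if all of them pass the
 * check, append them to the context.
 */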
int
rte_acl_add_rules(struct rte_acl_ctx *ctx, const struct rte_acl_rule *rules,
	uint32_t num)
{
	const struct rte_acl_rule *rv;
	uint32_t i;
	int32_t rc;

	if (ctx == NULL || rules == NULL || 0 == ctx->rule_sz)
		return -EINVAL;

	for (i = 0; i != num; i++) {
		rv = (const struct rte_acl_rule *)
			((uintptr_t)rules + i * ctx->rule_sz);
		rc = acl_check_rule(&rv->data);
		if (rc != 0) {
			ACL_LOG(ERR, "%s(%s): rule #%u is invalid",
				__func__, ctx->name, i + 1);
			return rc;
		}
	}

	return acl_add_rules(ctx, rules, num);
}

/*
 * Reset all rules.
 * Note that RT structures are not affected.
 */
void
rte_acl_reset_rules(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL)
		ctx->num_rules = 0;
}

/*
 * Reset all rules and destroy RT structures.
 */
void
rte_acl_reset(struct rte_acl_ctx *ctx)
{
	if (ctx != NULL) {
		rte_acl_reset_rules(ctx);
		rte_acl_build(ctx, &ctx->config);
	}
}

/*
 * Dump the ACL context to stdout.
 */
void
rte_acl_dump(const struct rte_acl_ctx *ctx)
{
	if (!ctx)
		return;
	printf("acl context <%s>@%p\n", ctx->name, ctx);
	printf("  socket_id=%"PRId32"\n", ctx->socket_id);
	printf("  alg=%"PRId32"\n", ctx->alg);
	printf("  first_load_sz=%"PRIu32"\n", ctx->first_load_sz);
	printf("  max_rules=%"PRIu32"\n", ctx->max_rules);
	printf("  rule_size=%"PRIu32"\n", ctx->rule_sz);
	printf("  num_rules=%"PRIu32"\n", ctx->num_rules);
	printf("  num_categories=%"PRIu32"\n", ctx->num_categories);
	printf("  num_tries=%"PRIu32"\n", ctx->num_tries);
}

/*
 * Dump all ACL contexts to stdout.
 */
void
rte_acl_list_dump(void)
{
	struct rte_acl_ctx *ctx;
	struct rte_acl_list *acl_list;
	struct rte_tailq_entry *te;

	acl_list = RTE_TAILQ_CAST(rte_acl_tailq.head, rte_acl_list);

	rte_mcfg_tailq_read_lock();
	TAILQ_FOREACH(te, acl_list, next) {
		ctx = (struct rte_acl_ctx *) te->data;
		rte_acl_dump(ctx);
	}
	rte_mcfg_tailq_read_unlock();
}