199a2dd95SBruce Richardson /* SPDX-License-Identifier: BSD-3-Clause 299a2dd95SBruce Richardson * Copyright(c) 2021 Intel Corporation 399a2dd95SBruce Richardson */ 499a2dd95SBruce Richardson 5e9fd1ebfSTyler Retzlaff #include <stdalign.h> 6f1f6ebc0SWilliam Tu #include <sys/queue.h> 7f1f6ebc0SWilliam Tu 899a2dd95SBruce Richardson #include <rte_thash.h> 999a2dd95SBruce Richardson #include <rte_tailq.h> 1099a2dd95SBruce Richardson #include <rte_random.h> 1199a2dd95SBruce Richardson #include <rte_memcpy.h> 1299a2dd95SBruce Richardson #include <rte_errno.h> 1399a2dd95SBruce Richardson #include <rte_eal_memconfig.h> 1499a2dd95SBruce Richardson #include <rte_log.h> 1599a2dd95SBruce Richardson #include <rte_malloc.h> 1699a2dd95SBruce Richardson 171acf14caSStephen Hemminger RTE_LOG_REGISTER_SUFFIX(thash_logtype, thash, INFO); 181acf14caSStephen Hemminger #define RTE_LOGTYPE_HASH thash_logtype 1997433132SDavid Marchand #define HASH_LOG(level, ...) \ 2097433132SDavid Marchand RTE_LOG_LINE(level, HASH, "" __VA_ARGS__) 211acf14caSStephen Hemminger 2299a2dd95SBruce Richardson #define THASH_NAME_LEN 64 2399a2dd95SBruce Richardson #define TOEPLITZ_HASH_LEN 32 2499a2dd95SBruce Richardson 2599a2dd95SBruce Richardson #define RETA_SZ_IN_RANGE(reta_sz) ((reta_sz >= RTE_THASH_RETA_SZ_MIN) &&\ 2699a2dd95SBruce Richardson (reta_sz <= RTE_THASH_RETA_SZ_MAX)) 2799a2dd95SBruce Richardson 2899a2dd95SBruce Richardson TAILQ_HEAD(rte_thash_list, rte_tailq_entry); 2999a2dd95SBruce Richardson static struct rte_tailq_elem rte_thash_tailq = { 3099a2dd95SBruce Richardson .name = "RTE_THASH", 3199a2dd95SBruce Richardson }; 3299a2dd95SBruce Richardson EAL_REGISTER_TAILQ(rte_thash_tailq) 3399a2dd95SBruce Richardson 3499a2dd95SBruce Richardson struct thash_lfsr { 3599a2dd95SBruce Richardson uint32_t ref_cnt; 3699a2dd95SBruce Richardson uint32_t poly; 3799a2dd95SBruce Richardson /**< polynomial associated with the lfsr */ 3899a2dd95SBruce Richardson uint32_t rev_poly; 3999a2dd95SBruce Richardson /**< 
polynomial to generate the sequence in reverse direction */ 4099a2dd95SBruce Richardson uint32_t state; 4199a2dd95SBruce Richardson /**< current state of the lfsr */ 4299a2dd95SBruce Richardson uint32_t rev_state; 4399a2dd95SBruce Richardson /**< current state of the lfsr for reverse direction */ 4499a2dd95SBruce Richardson uint32_t deg; /**< polynomial degree*/ 4599a2dd95SBruce Richardson uint32_t bits_cnt; /**< number of bits generated by lfsr*/ 4699a2dd95SBruce Richardson }; 4799a2dd95SBruce Richardson 4899a2dd95SBruce Richardson struct rte_thash_subtuple_helper { 4999a2dd95SBruce Richardson char name[THASH_NAME_LEN]; /** < Name of subtuple configuration */ 5099a2dd95SBruce Richardson LIST_ENTRY(rte_thash_subtuple_helper) next; 5199a2dd95SBruce Richardson struct thash_lfsr *lfsr; 5299a2dd95SBruce Richardson uint32_t offset; /** < Offset of the m-sequence */ 5399a2dd95SBruce Richardson uint32_t len; /** < Length of the m-sequence */ 5499a2dd95SBruce Richardson uint32_t tuple_offset; /** < Offset in bits of the subtuple */ 5599a2dd95SBruce Richardson uint32_t tuple_len; /** < Length in bits of the subtuple */ 5699a2dd95SBruce Richardson uint32_t lsb_msk; /** < (1 << reta_sz_log) - 1 */ 578efc7021STyler Retzlaff alignas(RTE_CACHE_LINE_SIZE) uint32_t compl_table[]; 5899a2dd95SBruce Richardson /** < Complementary table */ 5999a2dd95SBruce Richardson }; 6099a2dd95SBruce Richardson 6199a2dd95SBruce Richardson struct rte_thash_ctx { 6299a2dd95SBruce Richardson char name[THASH_NAME_LEN]; 6399a2dd95SBruce Richardson LIST_HEAD(, rte_thash_subtuple_helper) head; 6499a2dd95SBruce Richardson uint32_t key_len; /** < Length of the NIC RSS hash key */ 6599a2dd95SBruce Richardson uint32_t reta_sz_log; /** < size of the RSS ReTa in bits */ 6699a2dd95SBruce Richardson uint32_t subtuples_nb; /** < number of subtuples */ 6799a2dd95SBruce Richardson uint32_t flags; 68d27e2b7eSVladimir Medvedkin uint64_t *matrices; 69d27e2b7eSVladimir Medvedkin /**< matrices used with rte_thash_gfni 
implementation */ 708efc7021STyler Retzlaff uint8_t hash_key[]; 7199a2dd95SBruce Richardson }; 7299a2dd95SBruce Richardson 734fd8c4cbSVladimir Medvedkin int 744fd8c4cbSVladimir Medvedkin rte_thash_gfni_supported(void) 754fd8c4cbSVladimir Medvedkin { 764fd8c4cbSVladimir Medvedkin #ifdef RTE_THASH_GFNI_DEFINED 774fd8c4cbSVladimir Medvedkin if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_GFNI) && 784fd8c4cbSVladimir Medvedkin (rte_vect_get_max_simd_bitwidth() >= 794fd8c4cbSVladimir Medvedkin RTE_VECT_SIMD_512)) 804fd8c4cbSVladimir Medvedkin return 1; 814fd8c4cbSVladimir Medvedkin #endif 824fd8c4cbSVladimir Medvedkin 834fd8c4cbSVladimir Medvedkin return 0; 844fd8c4cbSVladimir Medvedkin }; 854fd8c4cbSVladimir Medvedkin 864fd8c4cbSVladimir Medvedkin void 874fd8c4cbSVladimir Medvedkin rte_thash_complete_matrix(uint64_t *matrixes, const uint8_t *rss_key, int size) 884fd8c4cbSVladimir Medvedkin { 894fd8c4cbSVladimir Medvedkin int i, j; 904fd8c4cbSVladimir Medvedkin uint8_t *m = (uint8_t *)matrixes; 914fd8c4cbSVladimir Medvedkin uint8_t left_part, right_part; 924fd8c4cbSVladimir Medvedkin 934fd8c4cbSVladimir Medvedkin for (i = 0; i < size; i++) { 944fd8c4cbSVladimir Medvedkin for (j = 0; j < 8; j++) { 954fd8c4cbSVladimir Medvedkin left_part = rss_key[i] << j; 964fd8c4cbSVladimir Medvedkin right_part = (uint16_t)(rss_key[(i + 1) % size]) >> 974fd8c4cbSVladimir Medvedkin (8 - j); 984fd8c4cbSVladimir Medvedkin m[i * 8 + j] = left_part|right_part; 994fd8c4cbSVladimir Medvedkin } 1004fd8c4cbSVladimir Medvedkin } 1014fd8c4cbSVladimir Medvedkin } 1024fd8c4cbSVladimir Medvedkin 10399a2dd95SBruce Richardson static inline uint32_t 10499a2dd95SBruce Richardson get_bit_lfsr(struct thash_lfsr *lfsr) 10599a2dd95SBruce Richardson { 10699a2dd95SBruce Richardson uint32_t bit, ret; 10799a2dd95SBruce Richardson 10899a2dd95SBruce Richardson /* 10999a2dd95SBruce Richardson * masking the TAP bits defined by the polynomial and 11099a2dd95SBruce Richardson * calculating parity 11199a2dd95SBruce Richardson 
*/ 1123d4e27fdSDavid Marchand bit = rte_popcount32(lfsr->state & lfsr->poly) & 0x1; 11399a2dd95SBruce Richardson ret = lfsr->state & 0x1; 11499a2dd95SBruce Richardson lfsr->state = ((lfsr->state >> 1) | (bit << (lfsr->deg - 1))) & 11599a2dd95SBruce Richardson ((1 << lfsr->deg) - 1); 11699a2dd95SBruce Richardson 11799a2dd95SBruce Richardson lfsr->bits_cnt++; 11899a2dd95SBruce Richardson return ret; 11999a2dd95SBruce Richardson } 12099a2dd95SBruce Richardson 12199a2dd95SBruce Richardson static inline uint32_t 12299a2dd95SBruce Richardson get_rev_bit_lfsr(struct thash_lfsr *lfsr) 12399a2dd95SBruce Richardson { 12499a2dd95SBruce Richardson uint32_t bit, ret; 12599a2dd95SBruce Richardson 1263d4e27fdSDavid Marchand bit = rte_popcount32(lfsr->rev_state & lfsr->rev_poly) & 0x1; 12799a2dd95SBruce Richardson ret = lfsr->rev_state & (1 << (lfsr->deg - 1)); 12899a2dd95SBruce Richardson lfsr->rev_state = ((lfsr->rev_state << 1) | bit) & 12999a2dd95SBruce Richardson ((1 << lfsr->deg) - 1); 13099a2dd95SBruce Richardson 13199a2dd95SBruce Richardson lfsr->bits_cnt++; 13299a2dd95SBruce Richardson return ret; 13399a2dd95SBruce Richardson } 13499a2dd95SBruce Richardson 13599a2dd95SBruce Richardson static inline uint32_t 136ebf7f118SVladimir Medvedkin get_rev_poly(uint32_t poly, int degree) 137ebf7f118SVladimir Medvedkin { 138ebf7f118SVladimir Medvedkin int i; 139ebf7f118SVladimir Medvedkin /* 140ebf7f118SVladimir Medvedkin * The implicit highest coefficient of the polynomial 141ebf7f118SVladimir Medvedkin * becomes the lowest after reversal. 142ebf7f118SVladimir Medvedkin */ 143ebf7f118SVladimir Medvedkin uint32_t rev_poly = 1; 144ebf7f118SVladimir Medvedkin uint32_t mask = (1 << degree) - 1; 145ebf7f118SVladimir Medvedkin 146ebf7f118SVladimir Medvedkin /* 147ebf7f118SVladimir Medvedkin * Here we assume "poly" argument is an irreducible polynomial, 148ebf7f118SVladimir Medvedkin * thus the lowest coefficient of the "poly" must always be equal to "1". 
149ebf7f118SVladimir Medvedkin * After the reversal, this the lowest coefficient becomes the highest and 150ebf7f118SVladimir Medvedkin * it is omitted since the highest coefficient is implicitly determined by 151ebf7f118SVladimir Medvedkin * degree of the polynomial. 152ebf7f118SVladimir Medvedkin */ 153ebf7f118SVladimir Medvedkin for (i = 1; i < degree; i++) 154ebf7f118SVladimir Medvedkin rev_poly |= ((poly >> i) & 0x1) << (degree - i); 155ebf7f118SVladimir Medvedkin 156ebf7f118SVladimir Medvedkin return rev_poly & mask; 157ebf7f118SVladimir Medvedkin } 158ebf7f118SVladimir Medvedkin 15999a2dd95SBruce Richardson static struct thash_lfsr * 160f9773e66SVladimir Medvedkin alloc_lfsr(uint32_t poly_degree) 16199a2dd95SBruce Richardson { 16299a2dd95SBruce Richardson struct thash_lfsr *lfsr; 16399a2dd95SBruce Richardson uint32_t i; 16499a2dd95SBruce Richardson 165f9773e66SVladimir Medvedkin if ((poly_degree > 32) || (poly_degree == 0)) 16699a2dd95SBruce Richardson return NULL; 16799a2dd95SBruce Richardson 16899a2dd95SBruce Richardson lfsr = rte_zmalloc(NULL, sizeof(struct thash_lfsr), 0); 16999a2dd95SBruce Richardson if (lfsr == NULL) 17099a2dd95SBruce Richardson return NULL; 17199a2dd95SBruce Richardson 172f9773e66SVladimir Medvedkin lfsr->deg = poly_degree; 17399a2dd95SBruce Richardson lfsr->poly = thash_get_rand_poly(lfsr->deg); 17499a2dd95SBruce Richardson do { 17599a2dd95SBruce Richardson lfsr->state = rte_rand() & ((1 << lfsr->deg) - 1); 17699a2dd95SBruce Richardson } while (lfsr->state == 0); 17799a2dd95SBruce Richardson /* init reverse order polynomial */ 178ebf7f118SVladimir Medvedkin lfsr->rev_poly = get_rev_poly(lfsr->poly, lfsr->deg); 17999a2dd95SBruce Richardson /* init proper rev_state*/ 18099a2dd95SBruce Richardson lfsr->rev_state = lfsr->state; 18199a2dd95SBruce Richardson for (i = 0; i <= lfsr->deg; i++) 18299a2dd95SBruce Richardson get_rev_bit_lfsr(lfsr); 18399a2dd95SBruce Richardson 18499a2dd95SBruce Richardson /* clear bits_cnt after rev_state was 
inited */ 18599a2dd95SBruce Richardson lfsr->bits_cnt = 0; 18699a2dd95SBruce Richardson lfsr->ref_cnt = 1; 18799a2dd95SBruce Richardson 18899a2dd95SBruce Richardson return lfsr; 18999a2dd95SBruce Richardson } 19099a2dd95SBruce Richardson 19199a2dd95SBruce Richardson static void 19299a2dd95SBruce Richardson attach_lfsr(struct rte_thash_subtuple_helper *h, struct thash_lfsr *lfsr) 19399a2dd95SBruce Richardson { 19499a2dd95SBruce Richardson lfsr->ref_cnt++; 19599a2dd95SBruce Richardson h->lfsr = lfsr; 19699a2dd95SBruce Richardson } 19799a2dd95SBruce Richardson 19899a2dd95SBruce Richardson static void 19999a2dd95SBruce Richardson free_lfsr(struct thash_lfsr *lfsr) 20099a2dd95SBruce Richardson { 20199a2dd95SBruce Richardson lfsr->ref_cnt--; 20299a2dd95SBruce Richardson if (lfsr->ref_cnt == 0) 20399a2dd95SBruce Richardson rte_free(lfsr); 20499a2dd95SBruce Richardson } 20599a2dd95SBruce Richardson 20699a2dd95SBruce Richardson struct rte_thash_ctx * 20799a2dd95SBruce Richardson rte_thash_init_ctx(const char *name, uint32_t key_len, uint32_t reta_sz, 20899a2dd95SBruce Richardson uint8_t *key, uint32_t flags) 20999a2dd95SBruce Richardson { 21099a2dd95SBruce Richardson struct rte_thash_ctx *ctx; 21199a2dd95SBruce Richardson struct rte_tailq_entry *te; 21299a2dd95SBruce Richardson struct rte_thash_list *thash_list; 21399a2dd95SBruce Richardson uint32_t i; 21499a2dd95SBruce Richardson 21599a2dd95SBruce Richardson if ((name == NULL) || (key_len == 0) || !RETA_SZ_IN_RANGE(reta_sz)) { 21699a2dd95SBruce Richardson rte_errno = EINVAL; 21799a2dd95SBruce Richardson return NULL; 21899a2dd95SBruce Richardson } 21999a2dd95SBruce Richardson 22099a2dd95SBruce Richardson thash_list = RTE_TAILQ_CAST(rte_thash_tailq.head, rte_thash_list); 22199a2dd95SBruce Richardson 22299a2dd95SBruce Richardson rte_mcfg_tailq_write_lock(); 22399a2dd95SBruce Richardson 22499a2dd95SBruce Richardson /* guarantee there's no existing */ 22599a2dd95SBruce Richardson TAILQ_FOREACH(te, thash_list, next) { 
22699a2dd95SBruce Richardson ctx = (struct rte_thash_ctx *)te->data; 22799a2dd95SBruce Richardson if (strncmp(name, ctx->name, sizeof(ctx->name)) == 0) 22899a2dd95SBruce Richardson break; 22999a2dd95SBruce Richardson } 23099a2dd95SBruce Richardson ctx = NULL; 23199a2dd95SBruce Richardson if (te != NULL) { 23299a2dd95SBruce Richardson rte_errno = EEXIST; 23399a2dd95SBruce Richardson goto exit; 23499a2dd95SBruce Richardson } 23599a2dd95SBruce Richardson 23699a2dd95SBruce Richardson /* allocate tailq entry */ 23799a2dd95SBruce Richardson te = rte_zmalloc("THASH_TAILQ_ENTRY", sizeof(*te), 0); 23899a2dd95SBruce Richardson if (te == NULL) { 239ae67895bSDavid Marchand HASH_LOG(ERR, 240ae67895bSDavid Marchand "Can not allocate tailq entry for thash context %s", 24199a2dd95SBruce Richardson name); 24299a2dd95SBruce Richardson rte_errno = ENOMEM; 24399a2dd95SBruce Richardson goto exit; 24499a2dd95SBruce Richardson } 24599a2dd95SBruce Richardson 24699a2dd95SBruce Richardson ctx = rte_zmalloc(NULL, sizeof(struct rte_thash_ctx) + key_len, 0); 24799a2dd95SBruce Richardson if (ctx == NULL) { 248ae67895bSDavid Marchand HASH_LOG(ERR, "thash ctx %s memory allocation failed", 24999a2dd95SBruce Richardson name); 25099a2dd95SBruce Richardson rte_errno = ENOMEM; 25199a2dd95SBruce Richardson goto free_te; 25299a2dd95SBruce Richardson } 25399a2dd95SBruce Richardson 25499a2dd95SBruce Richardson rte_strlcpy(ctx->name, name, sizeof(ctx->name)); 25599a2dd95SBruce Richardson ctx->key_len = key_len; 25699a2dd95SBruce Richardson ctx->reta_sz_log = reta_sz; 25799a2dd95SBruce Richardson LIST_INIT(&ctx->head); 25899a2dd95SBruce Richardson ctx->flags = flags; 25999a2dd95SBruce Richardson 26099a2dd95SBruce Richardson if (key) 26199a2dd95SBruce Richardson rte_memcpy(ctx->hash_key, key, key_len); 26299a2dd95SBruce Richardson else { 26399a2dd95SBruce Richardson for (i = 0; i < key_len; i++) 26499a2dd95SBruce Richardson ctx->hash_key[i] = rte_rand(); 26599a2dd95SBruce Richardson } 26699a2dd95SBruce 
Richardson 267d27e2b7eSVladimir Medvedkin if (rte_thash_gfni_supported()) { 268d27e2b7eSVladimir Medvedkin ctx->matrices = rte_zmalloc(NULL, key_len * sizeof(uint64_t), 269d27e2b7eSVladimir Medvedkin RTE_CACHE_LINE_SIZE); 270d27e2b7eSVladimir Medvedkin if (ctx->matrices == NULL) { 271ae67895bSDavid Marchand HASH_LOG(ERR, "Cannot allocate matrices"); 272d27e2b7eSVladimir Medvedkin rte_errno = ENOMEM; 273d27e2b7eSVladimir Medvedkin goto free_ctx; 274d27e2b7eSVladimir Medvedkin } 275d27e2b7eSVladimir Medvedkin 276d27e2b7eSVladimir Medvedkin rte_thash_complete_matrix(ctx->matrices, ctx->hash_key, 277d27e2b7eSVladimir Medvedkin key_len); 278d27e2b7eSVladimir Medvedkin } 279d27e2b7eSVladimir Medvedkin 28099a2dd95SBruce Richardson te->data = (void *)ctx; 28199a2dd95SBruce Richardson TAILQ_INSERT_TAIL(thash_list, te, next); 28299a2dd95SBruce Richardson 28399a2dd95SBruce Richardson rte_mcfg_tailq_write_unlock(); 28499a2dd95SBruce Richardson 28599a2dd95SBruce Richardson return ctx; 286d27e2b7eSVladimir Medvedkin 287d27e2b7eSVladimir Medvedkin free_ctx: 288d27e2b7eSVladimir Medvedkin rte_free(ctx); 28999a2dd95SBruce Richardson free_te: 29099a2dd95SBruce Richardson rte_free(te); 29199a2dd95SBruce Richardson exit: 29299a2dd95SBruce Richardson rte_mcfg_tailq_write_unlock(); 29399a2dd95SBruce Richardson return NULL; 29499a2dd95SBruce Richardson } 29599a2dd95SBruce Richardson 29699a2dd95SBruce Richardson struct rte_thash_ctx * 29799a2dd95SBruce Richardson rte_thash_find_existing(const char *name) 29899a2dd95SBruce Richardson { 29999a2dd95SBruce Richardson struct rte_thash_ctx *ctx; 30099a2dd95SBruce Richardson struct rte_tailq_entry *te; 30199a2dd95SBruce Richardson struct rte_thash_list *thash_list; 30299a2dd95SBruce Richardson 30399a2dd95SBruce Richardson thash_list = RTE_TAILQ_CAST(rte_thash_tailq.head, rte_thash_list); 30499a2dd95SBruce Richardson 30599a2dd95SBruce Richardson rte_mcfg_tailq_read_lock(); 30699a2dd95SBruce Richardson TAILQ_FOREACH(te, thash_list, next) { 
30799a2dd95SBruce Richardson ctx = (struct rte_thash_ctx *)te->data; 30899a2dd95SBruce Richardson if (strncmp(name, ctx->name, sizeof(ctx->name)) == 0) 30999a2dd95SBruce Richardson break; 31099a2dd95SBruce Richardson } 31199a2dd95SBruce Richardson 31299a2dd95SBruce Richardson rte_mcfg_tailq_read_unlock(); 31399a2dd95SBruce Richardson 31499a2dd95SBruce Richardson if (te == NULL) { 31599a2dd95SBruce Richardson rte_errno = ENOENT; 31699a2dd95SBruce Richardson return NULL; 31799a2dd95SBruce Richardson } 31899a2dd95SBruce Richardson 31999a2dd95SBruce Richardson return ctx; 32099a2dd95SBruce Richardson } 32199a2dd95SBruce Richardson 32299a2dd95SBruce Richardson void 32399a2dd95SBruce Richardson rte_thash_free_ctx(struct rte_thash_ctx *ctx) 32499a2dd95SBruce Richardson { 32599a2dd95SBruce Richardson struct rte_tailq_entry *te; 32699a2dd95SBruce Richardson struct rte_thash_list *thash_list; 32799a2dd95SBruce Richardson struct rte_thash_subtuple_helper *ent, *tmp; 32899a2dd95SBruce Richardson 32999a2dd95SBruce Richardson if (ctx == NULL) 33099a2dd95SBruce Richardson return; 33199a2dd95SBruce Richardson 33299a2dd95SBruce Richardson thash_list = RTE_TAILQ_CAST(rte_thash_tailq.head, rte_thash_list); 33399a2dd95SBruce Richardson rte_mcfg_tailq_write_lock(); 33499a2dd95SBruce Richardson TAILQ_FOREACH(te, thash_list, next) { 33599a2dd95SBruce Richardson if (te->data == (void *)ctx) 33699a2dd95SBruce Richardson break; 33799a2dd95SBruce Richardson } 33899a2dd95SBruce Richardson 33999a2dd95SBruce Richardson if (te != NULL) 34099a2dd95SBruce Richardson TAILQ_REMOVE(thash_list, te, next); 34199a2dd95SBruce Richardson 34299a2dd95SBruce Richardson rte_mcfg_tailq_write_unlock(); 34399a2dd95SBruce Richardson ent = LIST_FIRST(&(ctx->head)); 34499a2dd95SBruce Richardson while (ent) { 34599a2dd95SBruce Richardson free_lfsr(ent->lfsr); 34699a2dd95SBruce Richardson tmp = ent; 34799a2dd95SBruce Richardson ent = LIST_NEXT(ent, next); 34899a2dd95SBruce Richardson LIST_REMOVE(tmp, next); 
34999a2dd95SBruce Richardson rte_free(tmp); 35099a2dd95SBruce Richardson } 35199a2dd95SBruce Richardson 35299a2dd95SBruce Richardson rte_free(ctx); 35399a2dd95SBruce Richardson rte_free(te); 35499a2dd95SBruce Richardson } 35599a2dd95SBruce Richardson 35699a2dd95SBruce Richardson static inline void 35799a2dd95SBruce Richardson set_bit(uint8_t *ptr, uint32_t bit, uint32_t pos) 35899a2dd95SBruce Richardson { 35999a2dd95SBruce Richardson uint32_t byte_idx = pos / CHAR_BIT; 36099a2dd95SBruce Richardson /* index of the bit int byte, indexing starts from MSB */ 36199a2dd95SBruce Richardson uint32_t bit_idx = (CHAR_BIT - 1) - (pos & (CHAR_BIT - 1)); 36299a2dd95SBruce Richardson uint8_t tmp; 36399a2dd95SBruce Richardson 36499a2dd95SBruce Richardson tmp = ptr[byte_idx]; 36599a2dd95SBruce Richardson tmp &= ~(1 << bit_idx); 36699a2dd95SBruce Richardson tmp |= bit << bit_idx; 36799a2dd95SBruce Richardson ptr[byte_idx] = tmp; 36899a2dd95SBruce Richardson } 36999a2dd95SBruce Richardson 37099a2dd95SBruce Richardson /** 37199a2dd95SBruce Richardson * writes m-sequence to the hash_key for range [start, end] 37299a2dd95SBruce Richardson * (i.e. including start and end positions) 37399a2dd95SBruce Richardson */ 37499a2dd95SBruce Richardson static int 37599a2dd95SBruce Richardson generate_subkey(struct rte_thash_ctx *ctx, struct thash_lfsr *lfsr, 37699a2dd95SBruce Richardson uint32_t start, uint32_t end) 37799a2dd95SBruce Richardson { 37899a2dd95SBruce Richardson uint32_t i; 37999a2dd95SBruce Richardson uint32_t req_bits = (start < end) ? 
(end - start) : (start - end); 38099a2dd95SBruce Richardson req_bits++; /* due to including end */ 38199a2dd95SBruce Richardson 38299a2dd95SBruce Richardson /* check if lfsr overflow period of the m-sequence */ 38399a2dd95SBruce Richardson if (((lfsr->bits_cnt + req_bits) > (1ULL << lfsr->deg) - 1) && 38499a2dd95SBruce Richardson ((ctx->flags & RTE_THASH_IGNORE_PERIOD_OVERFLOW) != 38599a2dd95SBruce Richardson RTE_THASH_IGNORE_PERIOD_OVERFLOW)) { 386ae67895bSDavid Marchand HASH_LOG(ERR, 387ae67895bSDavid Marchand "Can't generate m-sequence due to period overflow"); 38899a2dd95SBruce Richardson return -ENOSPC; 38999a2dd95SBruce Richardson } 39099a2dd95SBruce Richardson 39199a2dd95SBruce Richardson if (start < end) { 39299a2dd95SBruce Richardson /* original direction (from left to right)*/ 39399a2dd95SBruce Richardson for (i = start; i <= end; i++) 39499a2dd95SBruce Richardson set_bit(ctx->hash_key, get_bit_lfsr(lfsr), i); 39599a2dd95SBruce Richardson 39699a2dd95SBruce Richardson } else { 39799a2dd95SBruce Richardson /* reverse direction (from right to left) */ 39899a2dd95SBruce Richardson for (i = end; i >= start; i--) 39999a2dd95SBruce Richardson set_bit(ctx->hash_key, get_rev_bit_lfsr(lfsr), i); 40099a2dd95SBruce Richardson } 40199a2dd95SBruce Richardson 402d27e2b7eSVladimir Medvedkin if (ctx->matrices != NULL) 403d27e2b7eSVladimir Medvedkin rte_thash_complete_matrix(ctx->matrices, ctx->hash_key, 404d27e2b7eSVladimir Medvedkin ctx->key_len); 405d27e2b7eSVladimir Medvedkin 40699a2dd95SBruce Richardson return 0; 40799a2dd95SBruce Richardson } 40899a2dd95SBruce Richardson 40999a2dd95SBruce Richardson static inline uint32_t 41099a2dd95SBruce Richardson get_subvalue(struct rte_thash_ctx *ctx, uint32_t offset) 41199a2dd95SBruce Richardson { 41299a2dd95SBruce Richardson uint32_t *tmp, val; 41399a2dd95SBruce Richardson 41499a2dd95SBruce Richardson tmp = (uint32_t *)(&ctx->hash_key[offset >> 3]); 41599a2dd95SBruce Richardson val = rte_be_to_cpu_32(*tmp); 41699a2dd95SBruce 
Richardson val >>= (TOEPLITZ_HASH_LEN - ((offset & (CHAR_BIT - 1)) + 41799a2dd95SBruce Richardson ctx->reta_sz_log)); 41899a2dd95SBruce Richardson 41999a2dd95SBruce Richardson return val & ((1 << ctx->reta_sz_log) - 1); 42099a2dd95SBruce Richardson } 42199a2dd95SBruce Richardson 42299a2dd95SBruce Richardson static inline void 42399a2dd95SBruce Richardson generate_complement_table(struct rte_thash_ctx *ctx, 42499a2dd95SBruce Richardson struct rte_thash_subtuple_helper *h) 42599a2dd95SBruce Richardson { 42699a2dd95SBruce Richardson int i, j, k; 42799a2dd95SBruce Richardson uint32_t val; 42899a2dd95SBruce Richardson uint32_t start; 42999a2dd95SBruce Richardson 43099a2dd95SBruce Richardson start = h->offset + h->len - (2 * ctx->reta_sz_log - 1); 43199a2dd95SBruce Richardson 43299a2dd95SBruce Richardson for (i = 1; i < (1 << ctx->reta_sz_log); i++) { 43399a2dd95SBruce Richardson val = 0; 43499a2dd95SBruce Richardson for (j = i; j; j &= (j - 1)) { 43599a2dd95SBruce Richardson k = rte_bsf32(j); 43699a2dd95SBruce Richardson val ^= get_subvalue(ctx, start - k + 43799a2dd95SBruce Richardson ctx->reta_sz_log - 1); 43899a2dd95SBruce Richardson } 43999a2dd95SBruce Richardson h->compl_table[val] = i; 44099a2dd95SBruce Richardson } 44199a2dd95SBruce Richardson } 44299a2dd95SBruce Richardson 44399a2dd95SBruce Richardson static inline int 44499a2dd95SBruce Richardson insert_before(struct rte_thash_ctx *ctx, 44599a2dd95SBruce Richardson struct rte_thash_subtuple_helper *ent, 44699a2dd95SBruce Richardson struct rte_thash_subtuple_helper *cur_ent, 44799a2dd95SBruce Richardson struct rte_thash_subtuple_helper *next_ent, 44899a2dd95SBruce Richardson uint32_t start, uint32_t end, uint32_t range_end) 44999a2dd95SBruce Richardson { 45099a2dd95SBruce Richardson int ret; 45199a2dd95SBruce Richardson 45299a2dd95SBruce Richardson if (end < cur_ent->offset) { 453f9773e66SVladimir Medvedkin ent->lfsr = alloc_lfsr(ctx->reta_sz_log); 45499a2dd95SBruce Richardson if (ent->lfsr == NULL) { 
45599a2dd95SBruce Richardson rte_free(ent); 45699a2dd95SBruce Richardson return -ENOMEM; 45799a2dd95SBruce Richardson } 45899a2dd95SBruce Richardson /* generate nonoverlapping range [start, end) */ 45999a2dd95SBruce Richardson ret = generate_subkey(ctx, ent->lfsr, start, end - 1); 46099a2dd95SBruce Richardson if (ret != 0) { 46199a2dd95SBruce Richardson free_lfsr(ent->lfsr); 46299a2dd95SBruce Richardson rte_free(ent); 46399a2dd95SBruce Richardson return ret; 46499a2dd95SBruce Richardson } 46599a2dd95SBruce Richardson } else if ((next_ent != NULL) && (end > next_ent->offset)) { 466ae67895bSDavid Marchand HASH_LOG(ERR, 46799a2dd95SBruce Richardson "Can't add helper %s due to conflict with existing" 468ae67895bSDavid Marchand " helper %s", ent->name, next_ent->name); 469adeca668SVladimir Medvedkin rte_free(ent); 47099a2dd95SBruce Richardson return -ENOSPC; 47199a2dd95SBruce Richardson } 47299a2dd95SBruce Richardson attach_lfsr(ent, cur_ent->lfsr); 47399a2dd95SBruce Richardson 47499a2dd95SBruce Richardson /** 47599a2dd95SBruce Richardson * generate partially overlapping range 47699a2dd95SBruce Richardson * [start, cur_ent->start) in reverse order 47799a2dd95SBruce Richardson */ 47899a2dd95SBruce Richardson ret = generate_subkey(ctx, ent->lfsr, cur_ent->offset - 1, start); 47999a2dd95SBruce Richardson if (ret != 0) { 48099a2dd95SBruce Richardson free_lfsr(ent->lfsr); 48199a2dd95SBruce Richardson rte_free(ent); 48299a2dd95SBruce Richardson return ret; 48399a2dd95SBruce Richardson } 48499a2dd95SBruce Richardson 48599a2dd95SBruce Richardson if (end > range_end) { 48699a2dd95SBruce Richardson /** 48799a2dd95SBruce Richardson * generate partially overlapping range 48899a2dd95SBruce Richardson * (range_end, end) 48999a2dd95SBruce Richardson */ 49099a2dd95SBruce Richardson ret = generate_subkey(ctx, ent->lfsr, range_end, end - 1); 49199a2dd95SBruce Richardson if (ret != 0) { 49299a2dd95SBruce Richardson free_lfsr(ent->lfsr); 49399a2dd95SBruce Richardson rte_free(ent); 
49499a2dd95SBruce Richardson return ret; 49599a2dd95SBruce Richardson } 49699a2dd95SBruce Richardson } 49799a2dd95SBruce Richardson 49899a2dd95SBruce Richardson LIST_INSERT_BEFORE(cur_ent, ent, next); 49999a2dd95SBruce Richardson generate_complement_table(ctx, ent); 50099a2dd95SBruce Richardson ctx->subtuples_nb++; 50199a2dd95SBruce Richardson return 0; 50299a2dd95SBruce Richardson } 50399a2dd95SBruce Richardson 50499a2dd95SBruce Richardson static inline int 50599a2dd95SBruce Richardson insert_after(struct rte_thash_ctx *ctx, 50699a2dd95SBruce Richardson struct rte_thash_subtuple_helper *ent, 50799a2dd95SBruce Richardson struct rte_thash_subtuple_helper *cur_ent, 50899a2dd95SBruce Richardson struct rte_thash_subtuple_helper *next_ent, 50999a2dd95SBruce Richardson struct rte_thash_subtuple_helper *prev_ent, 51099a2dd95SBruce Richardson uint32_t end, uint32_t range_end) 51199a2dd95SBruce Richardson { 51299a2dd95SBruce Richardson int ret; 51399a2dd95SBruce Richardson 51499a2dd95SBruce Richardson if ((next_ent != NULL) && (end > next_ent->offset)) { 515ae67895bSDavid Marchand HASH_LOG(ERR, 51699a2dd95SBruce Richardson "Can't add helper %s due to conflict with existing" 517ae67895bSDavid Marchand " helper %s", ent->name, next_ent->name); 518adeca668SVladimir Medvedkin rte_free(ent); 51999a2dd95SBruce Richardson return -EEXIST; 52099a2dd95SBruce Richardson } 52199a2dd95SBruce Richardson 52299a2dd95SBruce Richardson attach_lfsr(ent, cur_ent->lfsr); 52399a2dd95SBruce Richardson if (end > range_end) { 52499a2dd95SBruce Richardson /** 52599a2dd95SBruce Richardson * generate partially overlapping range 52699a2dd95SBruce Richardson * (range_end, end) 52799a2dd95SBruce Richardson */ 52899a2dd95SBruce Richardson ret = generate_subkey(ctx, ent->lfsr, range_end, end - 1); 52999a2dd95SBruce Richardson if (ret != 0) { 53099a2dd95SBruce Richardson free_lfsr(ent->lfsr); 53199a2dd95SBruce Richardson rte_free(ent); 53299a2dd95SBruce Richardson return ret; 53399a2dd95SBruce Richardson } 
53499a2dd95SBruce Richardson } 53599a2dd95SBruce Richardson 53699a2dd95SBruce Richardson LIST_INSERT_AFTER(prev_ent, ent, next); 53799a2dd95SBruce Richardson generate_complement_table(ctx, ent); 53899a2dd95SBruce Richardson ctx->subtuples_nb++; 53999a2dd95SBruce Richardson 54099a2dd95SBruce Richardson return 0; 54199a2dd95SBruce Richardson } 54299a2dd95SBruce Richardson 54399a2dd95SBruce Richardson int 54499a2dd95SBruce Richardson rte_thash_add_helper(struct rte_thash_ctx *ctx, const char *name, uint32_t len, 54599a2dd95SBruce Richardson uint32_t offset) 54699a2dd95SBruce Richardson { 54799a2dd95SBruce Richardson struct rte_thash_subtuple_helper *ent, *cur_ent, *prev_ent, *next_ent; 54899a2dd95SBruce Richardson uint32_t start, end; 54999a2dd95SBruce Richardson int ret; 55099a2dd95SBruce Richardson 55199a2dd95SBruce Richardson if ((ctx == NULL) || (name == NULL) || (len < ctx->reta_sz_log) || 55299a2dd95SBruce Richardson ((offset + len + TOEPLITZ_HASH_LEN - 1) > 55399a2dd95SBruce Richardson ctx->key_len * CHAR_BIT)) 55499a2dd95SBruce Richardson return -EINVAL; 55599a2dd95SBruce Richardson 55699a2dd95SBruce Richardson /* Check for existing name*/ 55799a2dd95SBruce Richardson LIST_FOREACH(cur_ent, &ctx->head, next) { 55899a2dd95SBruce Richardson if (strncmp(name, cur_ent->name, sizeof(cur_ent->name)) == 0) 55999a2dd95SBruce Richardson return -EEXIST; 56099a2dd95SBruce Richardson } 56199a2dd95SBruce Richardson 56299a2dd95SBruce Richardson end = offset + len + TOEPLITZ_HASH_LEN - 1; 56399a2dd95SBruce Richardson start = ((ctx->flags & RTE_THASH_MINIMAL_SEQ) == 56499a2dd95SBruce Richardson RTE_THASH_MINIMAL_SEQ) ? 
(end - (2 * ctx->reta_sz_log - 1)) : 56599a2dd95SBruce Richardson offset; 56699a2dd95SBruce Richardson 56799a2dd95SBruce Richardson ent = rte_zmalloc(NULL, sizeof(struct rte_thash_subtuple_helper) + 56899a2dd95SBruce Richardson sizeof(uint32_t) * (1 << ctx->reta_sz_log), 56999a2dd95SBruce Richardson RTE_CACHE_LINE_SIZE); 57099a2dd95SBruce Richardson if (ent == NULL) 57199a2dd95SBruce Richardson return -ENOMEM; 57299a2dd95SBruce Richardson 57399a2dd95SBruce Richardson rte_strlcpy(ent->name, name, sizeof(ent->name)); 57499a2dd95SBruce Richardson ent->offset = start; 57599a2dd95SBruce Richardson ent->len = end - start; 57699a2dd95SBruce Richardson ent->tuple_offset = offset; 57799a2dd95SBruce Richardson ent->tuple_len = len; 57899a2dd95SBruce Richardson ent->lsb_msk = (1 << ctx->reta_sz_log) - 1; 57999a2dd95SBruce Richardson 58099a2dd95SBruce Richardson cur_ent = LIST_FIRST(&ctx->head); 58199a2dd95SBruce Richardson while (cur_ent) { 58299a2dd95SBruce Richardson uint32_t range_end = cur_ent->offset + cur_ent->len; 58399a2dd95SBruce Richardson next_ent = LIST_NEXT(cur_ent, next); 58499a2dd95SBruce Richardson prev_ent = cur_ent; 58599a2dd95SBruce Richardson /* Iterate through overlapping ranges */ 58699a2dd95SBruce Richardson while ((next_ent != NULL) && (next_ent->offset < range_end)) { 58799a2dd95SBruce Richardson range_end = RTE_MAX(next_ent->offset + next_ent->len, 58899a2dd95SBruce Richardson range_end); 58999a2dd95SBruce Richardson if (start > next_ent->offset) 59099a2dd95SBruce Richardson prev_ent = next_ent; 59199a2dd95SBruce Richardson 59299a2dd95SBruce Richardson next_ent = LIST_NEXT(next_ent, next); 59399a2dd95SBruce Richardson } 59499a2dd95SBruce Richardson 59599a2dd95SBruce Richardson if (start < cur_ent->offset) 59699a2dd95SBruce Richardson return insert_before(ctx, ent, cur_ent, next_ent, 59799a2dd95SBruce Richardson start, end, range_end); 59899a2dd95SBruce Richardson else if (start < range_end) 59999a2dd95SBruce Richardson return insert_after(ctx, ent, 
cur_ent, next_ent, 60099a2dd95SBruce Richardson prev_ent, end, range_end); 60199a2dd95SBruce Richardson 60299a2dd95SBruce Richardson cur_ent = next_ent; 60399a2dd95SBruce Richardson continue; 60499a2dd95SBruce Richardson } 60599a2dd95SBruce Richardson 606f9773e66SVladimir Medvedkin ent->lfsr = alloc_lfsr(ctx->reta_sz_log); 60799a2dd95SBruce Richardson if (ent->lfsr == NULL) { 60899a2dd95SBruce Richardson rte_free(ent); 60999a2dd95SBruce Richardson return -ENOMEM; 61099a2dd95SBruce Richardson } 61199a2dd95SBruce Richardson 61299a2dd95SBruce Richardson /* generate nonoverlapping range [start, end) */ 61399a2dd95SBruce Richardson ret = generate_subkey(ctx, ent->lfsr, start, end - 1); 61499a2dd95SBruce Richardson if (ret != 0) { 61599a2dd95SBruce Richardson free_lfsr(ent->lfsr); 61699a2dd95SBruce Richardson rte_free(ent); 61799a2dd95SBruce Richardson return ret; 61899a2dd95SBruce Richardson } 61999a2dd95SBruce Richardson if (LIST_EMPTY(&ctx->head)) { 62099a2dd95SBruce Richardson LIST_INSERT_HEAD(&ctx->head, ent, next); 62199a2dd95SBruce Richardson } else { 62299a2dd95SBruce Richardson LIST_FOREACH(next_ent, &ctx->head, next) 62399a2dd95SBruce Richardson prev_ent = next_ent; 62499a2dd95SBruce Richardson 62599a2dd95SBruce Richardson LIST_INSERT_AFTER(prev_ent, ent, next); 62699a2dd95SBruce Richardson } 62799a2dd95SBruce Richardson generate_complement_table(ctx, ent); 62899a2dd95SBruce Richardson ctx->subtuples_nb++; 62999a2dd95SBruce Richardson 63099a2dd95SBruce Richardson return 0; 63199a2dd95SBruce Richardson } 63299a2dd95SBruce Richardson 63399a2dd95SBruce Richardson struct rte_thash_subtuple_helper * 63499a2dd95SBruce Richardson rte_thash_get_helper(struct rte_thash_ctx *ctx, const char *name) 63599a2dd95SBruce Richardson { 63699a2dd95SBruce Richardson struct rte_thash_subtuple_helper *ent; 63799a2dd95SBruce Richardson 63899a2dd95SBruce Richardson if ((ctx == NULL) || (name == NULL)) 63999a2dd95SBruce Richardson return NULL; 64099a2dd95SBruce Richardson 
64199a2dd95SBruce Richardson LIST_FOREACH(ent, &ctx->head, next) { 64299a2dd95SBruce Richardson if (strncmp(name, ent->name, sizeof(ent->name)) == 0) 64399a2dd95SBruce Richardson return ent; 64499a2dd95SBruce Richardson } 64599a2dd95SBruce Richardson 64699a2dd95SBruce Richardson return NULL; 64799a2dd95SBruce Richardson } 64899a2dd95SBruce Richardson 64999a2dd95SBruce Richardson uint32_t 65099a2dd95SBruce Richardson rte_thash_get_complement(struct rte_thash_subtuple_helper *h, 65199a2dd95SBruce Richardson uint32_t hash, uint32_t desired_hash) 65299a2dd95SBruce Richardson { 65399a2dd95SBruce Richardson return h->compl_table[(hash ^ desired_hash) & h->lsb_msk]; 65499a2dd95SBruce Richardson } 65599a2dd95SBruce Richardson 65699a2dd95SBruce Richardson const uint8_t * 65799a2dd95SBruce Richardson rte_thash_get_key(struct rte_thash_ctx *ctx) 65899a2dd95SBruce Richardson { 65999a2dd95SBruce Richardson return ctx->hash_key; 66099a2dd95SBruce Richardson } 66199a2dd95SBruce Richardson 662d27e2b7eSVladimir Medvedkin const uint64_t * 663d27e2b7eSVladimir Medvedkin rte_thash_get_gfni_matrices(struct rte_thash_ctx *ctx) 664d27e2b7eSVladimir Medvedkin { 665d27e2b7eSVladimir Medvedkin return ctx->matrices; 666d27e2b7eSVladimir Medvedkin } 667d27e2b7eSVladimir Medvedkin 668be81f77dSVladimir Medvedkin static inline uint8_t 6693a54f8deSVladimir Medvedkin read_unaligned_byte(uint8_t *ptr, unsigned int offset) 67099a2dd95SBruce Richardson { 671be81f77dSVladimir Medvedkin uint8_t ret = 0; 672be81f77dSVladimir Medvedkin 673be81f77dSVladimir Medvedkin ret = ptr[offset / CHAR_BIT]; 674be81f77dSVladimir Medvedkin if (offset % CHAR_BIT) { 675be81f77dSVladimir Medvedkin ret <<= (offset % CHAR_BIT); 676be81f77dSVladimir Medvedkin ret |= ptr[(offset / CHAR_BIT) + 1] >> 677be81f77dSVladimir Medvedkin (CHAR_BIT - (offset % CHAR_BIT)); 678be81f77dSVladimir Medvedkin } 679be81f77dSVladimir Medvedkin 6803a54f8deSVladimir Medvedkin return ret; 681be81f77dSVladimir Medvedkin } 682be81f77dSVladimir 
/*
 * Read up to 32 bits starting at an arbitrary bit offset.
 * len is clamped to the range [0, 32]; the value is returned
 * right-aligned in the low-order bits of the result.
 */
static inline uint32_t
read_unaligned_bits(uint8_t *ptr, int len, int offset)
{
	const int max_bits = (int)(sizeof(uint32_t) * CHAR_BIT);
	uint32_t acc = 0;
	int tail_shift;

	/* clamp the requested length to [0, 32] bits */
	len = (len < 0) ? 0 : len;
	len = (len > max_bits) ? max_bits : len;

	while (len > 0) {
		/* fetch one (possibly unaligned) byte starting at offset */
		uint8_t byte = ptr[offset / CHAR_BIT];

		if (offset % CHAR_BIT) {
			byte <<= offset % CHAR_BIT;
			byte |= ptr[(offset / CHAR_BIT) + 1] >>
				(CHAR_BIT - (offset % CHAR_BIT));
		}

		acc = (acc << CHAR_BIT) | byte;
		offset += CHAR_BIT;
		len -= CHAR_BIT;
	}

	/* drop the surplus low-order bits read past the requested length */
	tail_shift = (len == 0) ? 0 :
		(CHAR_BIT - ((len + CHAR_BIT) % CHAR_BIT));

	return acc >> tail_shift;
}

/* returns mask for len bits with given offset inside byte */
static inline uint8_t
get_bits_mask(unsigned int len, unsigned int offset)
{
	unsigned int last_bit;

	offset %= CHAR_BIT;
	/* last bit within byte */
	last_bit = offset + len;
	if (last_bit > CHAR_BIT)
		last_bit = CHAR_BIT;

	return ((1 << (CHAR_BIT - offset)) - 1) ^
		((1 << (CHAR_BIT - last_bit)) - 1);
}

/*
 * Write the high-order len bits of val (len <= 8) into the bit field
 * starting at the given bit offset, preserving surrounding bits.
 * The field may straddle a byte boundary.
 */
static inline void
write_unaligned_byte(uint8_t *ptr, unsigned int len,
	unsigned int offset, uint8_t val)
{
	unsigned int idx = offset / CHAR_BIT;
	uint8_t cur;

	cur = ptr[idx];
	cur &= ~get_bits_mask(len, offset);
	cur |= ((val << (CHAR_BIT - len)) >> (offset % CHAR_BIT));
	ptr[idx] = cur;

	if (((offset + len) / CHAR_BIT) != idx) {
		/* the field spills into the following byte */
		unsigned int rest_len = (offset + len) % CHAR_BIT;

		cur = ptr[idx + 1];
		cur &= ~get_bits_mask(rest_len, 0);
		cur |= val << (CHAR_BIT - rest_len);
		ptr[idx + 1] = cur;
	}
}

/*
 * Write up to 32 bits of val at an arbitrary bit offset,
 * byte by byte starting from the least significant chunk.
 * len is clamped to the range [0, 32].
 */
static inline void
write_unaligned_bits(uint8_t *ptr, int len, int offset, uint32_t val)
{
	const int max_bits = (int)(sizeof(uint32_t) * CHAR_BIT);
	unsigned int chunk_len;
	uint8_t chunk;

	len = (len < 0) ? 0 : len;
	len = (len > max_bits) ? max_bits : len;

	while (len > 0) {
		chunk_len = (len < CHAR_BIT) ? (unsigned int)len : CHAR_BIT;
		chunk = (uint8_t)val & ((1 << chunk_len) - 1);
		write_unaligned_byte(ptr, chunk_len,
			offset + len - chunk_len, chunk);
		len -= CHAR_BIT;
		val >>= CHAR_BIT;
	}
}
Richardson int 75899a2dd95SBruce Richardson rte_thash_adjust_tuple(struct rte_thash_ctx *ctx, 75999a2dd95SBruce Richardson struct rte_thash_subtuple_helper *h, 76099a2dd95SBruce Richardson uint8_t *tuple, unsigned int tuple_len, 76199a2dd95SBruce Richardson uint32_t desired_value, unsigned int attempts, 76299a2dd95SBruce Richardson rte_thash_check_tuple_t fn, void *userdata) 76399a2dd95SBruce Richardson { 76499a2dd95SBruce Richardson uint32_t tmp_tuple[tuple_len / sizeof(uint32_t)]; 76599a2dd95SBruce Richardson unsigned int i, j, ret = 0; 76699a2dd95SBruce Richardson uint32_t hash, adj_bits; 76799a2dd95SBruce Richardson const uint8_t *hash_key; 768be81f77dSVladimir Medvedkin uint32_t tmp; 769be81f77dSVladimir Medvedkin int offset; 770be81f77dSVladimir Medvedkin int tmp_len; 77199a2dd95SBruce Richardson 77299a2dd95SBruce Richardson if ((ctx == NULL) || (h == NULL) || (tuple == NULL) || 77399a2dd95SBruce Richardson (tuple_len % sizeof(uint32_t) != 0) || (attempts <= 0)) 77499a2dd95SBruce Richardson return -EINVAL; 77599a2dd95SBruce Richardson 77699a2dd95SBruce Richardson hash_key = rte_thash_get_key(ctx); 77799a2dd95SBruce Richardson 778be81f77dSVladimir Medvedkin attempts = RTE_MIN(attempts, 1U << (h->tuple_len - ctx->reta_sz_log)); 779be81f77dSVladimir Medvedkin 78099a2dd95SBruce Richardson for (i = 0; i < attempts; i++) { 781d27e2b7eSVladimir Medvedkin if (ctx->matrices != NULL) 782d27e2b7eSVladimir Medvedkin hash = rte_thash_gfni(ctx->matrices, tuple, tuple_len); 783d27e2b7eSVladimir Medvedkin else { 78499a2dd95SBruce Richardson for (j = 0; j < (tuple_len / 4); j++) 78599a2dd95SBruce Richardson tmp_tuple[j] = 786d27e2b7eSVladimir Medvedkin rte_be_to_cpu_32( 787d27e2b7eSVladimir Medvedkin *(uint32_t *)&tuple[j * 4]); 78899a2dd95SBruce Richardson 78999a2dd95SBruce Richardson hash = rte_softrss(tmp_tuple, tuple_len / 4, hash_key); 790d27e2b7eSVladimir Medvedkin } 791d27e2b7eSVladimir Medvedkin 79299a2dd95SBruce Richardson adj_bits = rte_thash_get_complement(h, hash, 
desired_value); 79399a2dd95SBruce Richardson 79499a2dd95SBruce Richardson /* 79599a2dd95SBruce Richardson * Hint: LSB of adj_bits corresponds to 796be81f77dSVladimir Medvedkin * offset + len bit of the subtuple 79799a2dd95SBruce Richardson */ 798be81f77dSVladimir Medvedkin offset = h->tuple_offset + h->tuple_len - ctx->reta_sz_log; 799be81f77dSVladimir Medvedkin tmp = read_unaligned_bits(tuple, ctx->reta_sz_log, offset); 800be81f77dSVladimir Medvedkin tmp ^= adj_bits; 801be81f77dSVladimir Medvedkin write_unaligned_bits(tuple, ctx->reta_sz_log, offset, tmp); 80299a2dd95SBruce Richardson 80399a2dd95SBruce Richardson if (fn != NULL) { 80499a2dd95SBruce Richardson ret = (fn(userdata, tuple)) ? 0 : -EEXIST; 80599a2dd95SBruce Richardson if (ret == 0) 80699a2dd95SBruce Richardson return 0; 80799a2dd95SBruce Richardson else if (i < (attempts - 1)) { 808be81f77dSVladimir Medvedkin /* increment subtuple part by 1 */ 809be81f77dSVladimir Medvedkin tmp_len = RTE_MIN(sizeof(uint32_t) * CHAR_BIT, 810be81f77dSVladimir Medvedkin h->tuple_len - ctx->reta_sz_log); 811be81f77dSVladimir Medvedkin offset -= tmp_len; 812be81f77dSVladimir Medvedkin tmp = read_unaligned_bits(tuple, tmp_len, 813be81f77dSVladimir Medvedkin offset); 814be81f77dSVladimir Medvedkin tmp++; 815be81f77dSVladimir Medvedkin tmp &= (1 << tmp_len) - 1; 816be81f77dSVladimir Medvedkin write_unaligned_bits(tuple, tmp_len, offset, 817be81f77dSVladimir Medvedkin tmp); 81899a2dd95SBruce Richardson } 81999a2dd95SBruce Richardson } else 82099a2dd95SBruce Richardson return 0; 82199a2dd95SBruce Richardson } 82299a2dd95SBruce Richardson 82399a2dd95SBruce Richardson return ret; 82499a2dd95SBruce Richardson } 825*6addb781SVladimir Medvedkin 826*6addb781SVladimir Medvedkin int 827*6addb781SVladimir Medvedkin rte_thash_gen_key(uint8_t *key, size_t key_len, size_t reta_sz_log, 828*6addb781SVladimir Medvedkin uint32_t entropy_start, size_t entropy_sz) 829*6addb781SVladimir Medvedkin { 830*6addb781SVladimir Medvedkin size_t i, end, 
start; 831*6addb781SVladimir Medvedkin 832*6addb781SVladimir Medvedkin /* define lfsr sequence range*/ 833*6addb781SVladimir Medvedkin end = entropy_start + entropy_sz + TOEPLITZ_HASH_LEN - 1; 834*6addb781SVladimir Medvedkin start = end - (entropy_sz + reta_sz_log - 1); 835*6addb781SVladimir Medvedkin 836*6addb781SVladimir Medvedkin if ((key == NULL) || (key_len * CHAR_BIT < entropy_start + entropy_sz) || 837*6addb781SVladimir Medvedkin (entropy_sz < reta_sz_log) || (reta_sz_log > TOEPLITZ_HASH_LEN)) 838*6addb781SVladimir Medvedkin return -EINVAL; 839*6addb781SVladimir Medvedkin 840*6addb781SVladimir Medvedkin struct thash_lfsr *lfsr = alloc_lfsr(reta_sz_log); 841*6addb781SVladimir Medvedkin if (lfsr == NULL) 842*6addb781SVladimir Medvedkin return -ENOMEM; 843*6addb781SVladimir Medvedkin 844*6addb781SVladimir Medvedkin for (i = start; i < end; i++) 845*6addb781SVladimir Medvedkin set_bit(key, get_bit_lfsr(lfsr), i); 846*6addb781SVladimir Medvedkin 847*6addb781SVladimir Medvedkin free_lfsr(lfsr); 848*6addb781SVladimir Medvedkin 849*6addb781SVladimir Medvedkin return 0; 850*6addb781SVladimir Medvedkin } 851