/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2018 Cavium, Inc
 */

#ifndef _CPT_UCODE_H_
#define _CPT_UCODE_H_

#include <stdbool.h>

#include "cpt_common.h"
#include "cpt_hw_types.h"
#include "cpt_mcode_defines.h"

/*
 * This file defines functions that are interfaces to the microcode spec.
 *
 */

static uint8_t zuc_d[32] = {
	0x44, 0xD7, 0x26, 0xBC, 0x62, 0x6B, 0x13, 0x5E,
	0x57, 0x89, 0x35, 0xE2, 0x71, 0x35, 0x09, 0xAF,
	0x4D, 0x78, 0x2F, 0x13, 0x6B, 0xC4, 0x1A, 0xF1,
	0x5E, 0x26, 0x3C, 0x4D, 0x78, 0x9A, 0x47, 0xAC
};

static __rte_always_inline int
cpt_is_algo_supported(struct rte_crypto_sym_xform *xform)
{
	/*
	 * Microcode only supports the following combinations:
	 * Encryption followed by authentication
	 * Authentication followed by decryption
	 */
	if (xform->next) {
		if ((xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) &&
		    (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
		    (xform->next->cipher.op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)) {
			/* Unsupported as of now by microcode */
			CPT_LOG_DP_ERR("Unsupported combination");
			return -1;
		}
		if ((xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
		    (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH) &&
		    (xform->cipher.op == RTE_CRYPTO_CIPHER_OP_DECRYPT)) {
			/* For GMAC auth there is no cipher operation */
			if (xform->aead.algo != RTE_CRYPTO_AEAD_AES_GCM ||
			    xform->next->auth.algo !=
			    RTE_CRYPTO_AUTH_AES_GMAC) {
				/* Unsupported as of now by microcode */
				CPT_LOG_DP_ERR("Unsupported combination");
				return -1;
			}
		}
	}
	return 0;
}

static __rte_always_inline void
gen_key_snow3g(uint8_t *ck, uint32_t *keyx)
{
	int i, base;

	for (i = 0; i < 4; i++) {
		base = 4 * i;
		keyx[3 - i] = (ck[base] << 24) | (ck[base + 1] << 16) |
			(ck[base + 2] << 8) | (ck[base + 3]);
		keyx[3 - i] = rte_cpu_to_be_32(keyx[3 - i]);
	}
}
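
/*
 * Worked example for gen_key_snow3g(): with i = 0, bytes ck[0..3] are
 * packed MSB-first and stored big-endian into keyx[3], so the four 32-bit
 * words end up in reverse order relative to the input byte stream. The
 * callers below copy this keyx[] layout straight into zs_ctx.ci_key.
 */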

static __rte_always_inline void
cpt_fc_salt_update(void *ctx,
		   uint8_t *salt)
{
	struct cpt_ctx *cpt_ctx = ctx;
	memcpy(&cpt_ctx->fctx.enc.encr_iv, salt, 4);
}

static __rte_always_inline int
cpt_fc_ciph_validate_key_aes(uint16_t key_len)
{
	switch (key_len) {
	case CPT_BYTE_16:
	case CPT_BYTE_24:
	case CPT_BYTE_32:
		return 0;
	default:
		return -1;
	}
}

static __rte_always_inline int
cpt_fc_ciph_validate_key(cipher_type_t type, struct cpt_ctx *cpt_ctx,
		uint16_t key_len)
{
	int fc_type = 0;
	switch (type) {
	case PASSTHROUGH:
		fc_type = FC_GEN;
		break;
	case DES3_CBC:
	case DES3_ECB:
		fc_type = FC_GEN;
		break;
	case AES_CBC:
	case AES_ECB:
	case AES_CFB:
	case AES_CTR:
	case AES_GCM:
		if (unlikely(cpt_fc_ciph_validate_key_aes(key_len) != 0))
			return -1;
		fc_type = FC_GEN;
		break;
	case AES_XTS:
		key_len = key_len / 2;
		if (unlikely(key_len == CPT_BYTE_24)) {
			CPT_LOG_DP_ERR("Invalid AES key len for XTS");
			return -1;
		}
		if (unlikely(cpt_fc_ciph_validate_key_aes(key_len) != 0))
			return -1;
		fc_type = FC_GEN;
		break;
	case ZUC_EEA3:
	case SNOW3G_UEA2:
		if (unlikely(key_len != 16))
			return -1;
		/* No support for AEAD yet */
		if (unlikely(cpt_ctx->hash_type))
			return -1;
		fc_type = ZUC_SNOW3G;
		break;
	case KASUMI_F8_CBC:
	case KASUMI_F8_ECB:
		if (unlikely(key_len != 16))
			return -1;
		/* No support for AEAD yet */
		if (unlikely(cpt_ctx->hash_type))
			return -1;
		fc_type = KASUMI;
		break;
	default:
		return -1;
	}
	return fc_type;
}

static __rte_always_inline void
cpt_fc_ciph_set_key_passthrough(struct cpt_ctx *cpt_ctx, mc_fc_context_t *fctx)
{
	cpt_ctx->enc_cipher = 0;
	CPT_P_ENC_CTRL(fctx).enc_cipher = 0;
}

static __rte_always_inline void
cpt_fc_ciph_set_key_set_aes_key_type(mc_fc_context_t *fctx, uint16_t key_len)
{
	mc_aes_type_t aes_key_type = 0;
	switch (key_len) {
	case CPT_BYTE_16:
		aes_key_type = AES_128_BIT;
		break;
	case CPT_BYTE_24:
		aes_key_type = AES_192_BIT;
		break;
	case CPT_BYTE_32:
		aes_key_type = AES_256_BIT;
		break;
	default:
		/* This should not happen */
		CPT_LOG_DP_ERR("Invalid AES key len");
		return;
	}
	CPT_P_ENC_CTRL(fctx).aes_key = aes_key_type;
}

static __rte_always_inline void
cpt_fc_ciph_set_key_snow3g_uea2(struct cpt_ctx *cpt_ctx, uint8_t *key,
		uint16_t key_len)
{
	uint32_t keyx[4];
	cpt_ctx->snow3g = 1;
	gen_key_snow3g(key, keyx);
	memcpy(cpt_ctx->zs_ctx.ci_key, keyx, key_len);
	cpt_ctx->fc_type = ZUC_SNOW3G;
	cpt_ctx->zsk_flags = 0;
}

static __rte_always_inline void
cpt_fc_ciph_set_key_zuc_eea3(struct cpt_ctx *cpt_ctx, uint8_t *key,
		uint16_t key_len)
{
	cpt_ctx->snow3g = 0;
	memcpy(cpt_ctx->zs_ctx.ci_key, key, key_len);
	memcpy(cpt_ctx->zs_ctx.zuc_const, zuc_d, 32);
	cpt_ctx->fc_type = ZUC_SNOW3G;
	cpt_ctx->zsk_flags = 0;
}

static __rte_always_inline void
cpt_fc_ciph_set_key_kasumi_f8_ecb(struct cpt_ctx *cpt_ctx, uint8_t *key,
		uint16_t key_len)
{
	cpt_ctx->k_ecb = 1;
	memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
	cpt_ctx->zsk_flags = 0;
	cpt_ctx->fc_type = KASUMI;
}

static __rte_always_inline void
cpt_fc_ciph_set_key_kasumi_f8_cbc(struct cpt_ctx *cpt_ctx, uint8_t *key,
		uint16_t key_len)
{
	memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
	cpt_ctx->zsk_flags = 0;
	cpt_ctx->fc_type = KASUMI;
}
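
/*
 * Program the cipher key into the session context. Summarising the code
 * below: the key is first validated via cpt_fc_ciph_validate_key();
 * FC_GEN ciphers are programmed into the flexi-crypto context (IV taken
 * from DPTR, except DES3_ECB which uses the context IV), while the
 * ZUC/SNOW3G/KASUMI helpers above fill their dedicated contexts. For
 * AES-GCM a 4-byte salt may be passed; a NULL key then means a salt-only
 * update. Returns 0 on success, -1 on an invalid key.
 */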
static __rte_always_inline int
cpt_fc_ciph_set_key(void *ctx, cipher_type_t type, uint8_t *key,
		    uint16_t key_len, uint8_t *salt)
{
	struct cpt_ctx *cpt_ctx = ctx;
	mc_fc_context_t *fctx = &cpt_ctx->fctx;
	uint64_t *ctrl_flags = NULL;
	int fc_type;

	/* Validate key before proceeding */
	fc_type = cpt_fc_ciph_validate_key(type, cpt_ctx, key_len);
	if (unlikely(fc_type == -1))
		return -1;

	if (fc_type == FC_GEN) {
		cpt_ctx->fc_type = FC_GEN;
		ctrl_flags = (uint64_t *)&(fctx->enc.enc_ctrl.flags);
		*ctrl_flags = rte_be_to_cpu_64(*ctrl_flags);
		/*
		 * We need to always say IV is from DPTR as user can
		 * sometimes override IV per operation.
		 */
		CPT_P_ENC_CTRL(fctx).iv_source = CPT_FROM_DPTR;
	}

	switch (type) {
	case PASSTHROUGH:
		cpt_fc_ciph_set_key_passthrough(cpt_ctx, fctx);
		goto fc_success;
	case DES3_CBC:
		/* CPT performs DES using 3DES with the 8B DES-key
		 * replicated 2 more times to match the 24B 3DES-key.
		 * Eg. If org. key is "0x0a 0x0b", then new key is
		 * "0x0a 0x0b 0x0a 0x0b 0x0a 0x0b"
		 */
		if (key_len == 8) {
			/* Skipping the first 8B as it will be copied
			 * in the regular code flow
			 */
			memcpy(fctx->enc.encr_key+key_len, key, key_len);
			memcpy(fctx->enc.encr_key+2*key_len, key, key_len);
		}
		break;
	case DES3_ECB:
		/* For DES3_ECB IV need to be from CTX. */
		CPT_P_ENC_CTRL(fctx).iv_source = CPT_FROM_CTX;
		break;
	case AES_CBC:
	case AES_ECB:
	case AES_CFB:
	case AES_CTR:
		cpt_fc_ciph_set_key_set_aes_key_type(fctx, key_len);
		break;
	case AES_GCM:
		/* Even though iv source is from dptr,
		 * aes_gcm salt is taken from ctx
		 */
		if (salt) {
			memcpy(fctx->enc.encr_iv, salt, 4);
			/* Assuming it was just salt update
			 * and nothing else
			 */
			if (!key)
				goto fc_success;
		}
		cpt_fc_ciph_set_key_set_aes_key_type(fctx, key_len);
		break;
	case AES_XTS:
		key_len = key_len / 2;
		cpt_fc_ciph_set_key_set_aes_key_type(fctx, key_len);

		/* Copy key2 for XTS into ipad */
		memset(fctx->hmac.ipad, 0, sizeof(fctx->hmac.ipad));
		memcpy(fctx->hmac.ipad, &key[key_len], key_len);
		break;
	case SNOW3G_UEA2:
		cpt_fc_ciph_set_key_snow3g_uea2(cpt_ctx, key, key_len);
		goto success;
	case ZUC_EEA3:
		cpt_fc_ciph_set_key_zuc_eea3(cpt_ctx, key, key_len);
		goto success;
	case KASUMI_F8_ECB:
		cpt_fc_ciph_set_key_kasumi_f8_ecb(cpt_ctx, key, key_len);
		goto success;
	case KASUMI_F8_CBC:
		cpt_fc_ciph_set_key_kasumi_f8_cbc(cpt_ctx, key, key_len);
		goto success;
	default:
		break;
	}

	/* Only for FC_GEN case */

	/* For GMAC auth, cipher must be NULL */
	if (cpt_ctx->hash_type != GMAC_TYPE)
		CPT_P_ENC_CTRL(fctx).enc_cipher = type;

	memcpy(fctx->enc.encr_key, key, key_len);

fc_success:
	*ctrl_flags = rte_cpu_to_be_64(*ctrl_flags);

success:
	cpt_ctx->enc_cipher = type;

	return 0;
}
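
/*
 * Each sg_comp_t below packs four big-endian (length, pointer) pairs.
 * The fill_sg_comp*() helpers take a running entry index i: list[i >> 2]
 * selects the component and i % 4 the slot inside it, and the helpers
 * return the next free index. A typical (illustrative) sequence is:
 *
 *	i = 0;
 *	i = fill_sg_comp(list, i, dma_addr, len);
 */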
static __rte_always_inline uint32_t
fill_sg_comp(sg_comp_t *list,
	     uint32_t i,
	     phys_addr_t dma_addr,
	     uint32_t size)
{
	sg_comp_t *to = &list[i>>2];

	to->u.s.len[i%4] = rte_cpu_to_be_16(size);
	to->ptr[i%4] = rte_cpu_to_be_64(dma_addr);
	i++;
	return i;
}

static __rte_always_inline uint32_t
fill_sg_comp_from_buf(sg_comp_t *list,
		      uint32_t i,
		      buf_ptr_t *from)
{
	sg_comp_t *to = &list[i>>2];

	to->u.s.len[i%4] = rte_cpu_to_be_16(from->size);
	to->ptr[i%4] = rte_cpu_to_be_64(from->dma_addr);
	i++;
	return i;
}
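
/*
 * Like fill_sg_comp_from_buf(), but the entry length is capped at *psize
 * and *psize is decremented by what was consumed; callers check the
 * remaining *psize afterwards to detect insufficient buffer space.
 */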
static __rte_always_inline uint32_t
fill_sg_comp_from_buf_min(sg_comp_t *list,
			  uint32_t i,
			  buf_ptr_t *from,
			  uint32_t *psize)
{
	sg_comp_t *to = &list[i >> 2];
	uint32_t size = *psize;
	uint32_t e_len;

	e_len = (size > from->size) ? from->size : size;
	to->u.s.len[i % 4] = rte_cpu_to_be_16(e_len);
	to->ptr[i % 4] = rte_cpu_to_be_64(from->dma_addr);
	*psize -= e_len;
	i++;
	return i;
}

/*
 * This fills the MC expected SGIO list
 * from IOV given by user.
 */
static __rte_always_inline uint32_t
fill_sg_comp_from_iov(sg_comp_t *list,
		      uint32_t i,
		      iov_ptr_t *from, uint32_t from_offset,
		      uint32_t *psize, buf_ptr_t *extra_buf,
		      uint32_t extra_offset)
{
	int32_t j;
	uint32_t extra_len = extra_buf ? extra_buf->size : 0;
	uint32_t size = *psize - extra_len;
	buf_ptr_t *bufs;

	bufs = from->bufs;
	for (j = 0; (j < from->buf_cnt) && size; j++) {
		phys_addr_t e_dma_addr;
		uint32_t e_len;
		sg_comp_t *to = &list[i >> 2];

		if (!bufs[j].size)
			continue;

		if (unlikely(from_offset)) {
			if (from_offset >= bufs[j].size) {
				from_offset -= bufs[j].size;
				continue;
			}
			e_dma_addr = bufs[j].dma_addr + from_offset;
			e_len = (size > (bufs[j].size - from_offset)) ?
				(bufs[j].size - from_offset) : size;
			from_offset = 0;
		} else {
			e_dma_addr = bufs[j].dma_addr;
			e_len = (size > bufs[j].size) ?
				bufs[j].size : size;
		}

		to->u.s.len[i % 4] = rte_cpu_to_be_16(e_len);
		to->ptr[i % 4] = rte_cpu_to_be_64(e_dma_addr);

		if (extra_len && (e_len >= extra_offset)) {
			/* Break the data at given offset */
			uint32_t next_len = e_len - extra_offset;
			phys_addr_t next_dma = e_dma_addr + extra_offset;

			if (!extra_offset) {
				i--;
			} else {
				e_len = extra_offset;
				size -= e_len;
				to->u.s.len[i % 4] = rte_cpu_to_be_16(e_len);
			}

			/* Insert extra data ptr */
			if (extra_len) {
				i++;
				to = &list[i >> 2];
				to->u.s.len[i % 4] =
					rte_cpu_to_be_16(extra_buf->size);
				to->ptr[i % 4] =
					rte_cpu_to_be_64(extra_buf->dma_addr);

				/* size already decremented by extra len */
			}

			/* insert the rest of the data */
			if (next_len) {
				i++;
				to = &list[i >> 2];
				to->u.s.len[i % 4] = rte_cpu_to_be_16(next_len);
				to->ptr[i % 4] = rte_cpu_to_be_64(next_dma);
				size -= next_len;
			}
			extra_len = 0;

		} else {
			size -= e_len;
		}
		if (extra_offset)
			extra_offset -= size;
		i++;
	}

	*psize = size;
	return (uint32_t)i;
}
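
/*
 * Prepare a flexi-crypto encrypt (+ optional HMAC) instruction. As the
 * code below implies, d_offs packs the encrypt and auth offsets (extracted
 * with ENCR_OFFSET()/AUTH_OFFSET()) and d_lens the corresponding lengths
 * (ENCR_DLEN()/AUTH_DLEN()). Scratch space for the completion code, the
 * cpt_request_info and, in SG mode, the gather/scatter lists is carved
 * out of fc_params->meta_buf. On success *prep_req points to the prepared
 * request and 0 is returned.
 */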
static __rte_always_inline int
cpt_enc_hmac_prep(uint32_t flags,
		  uint64_t d_offs,
		  uint64_t d_lens,
		  fc_params_t *fc_params,
		  void *op,
		  void **prep_req)
{
	uint32_t iv_offset = 0;
	int32_t inputlen, outputlen, enc_dlen, auth_dlen;
	struct cpt_ctx *cpt_ctx;
	uint32_t cipher_type, hash_type;
	uint32_t mac_len, size;
	uint8_t iv_len = 16;
	struct cpt_request_info *req;
	buf_ptr_t *meta_p, *aad_buf = NULL;
	uint32_t encr_offset, auth_offset;
	uint32_t encr_data_len, auth_data_len, aad_len = 0;
	uint32_t passthrough_len = 0;
	void *m_vaddr, *offset_vaddr;
	uint64_t m_dma, offset_dma, ctx_dma;
	vq_cmd_word0_t vq_cmd_w0;
	vq_cmd_word3_t vq_cmd_w3;
	void *c_vaddr;
	uint64_t c_dma;
	int32_t m_size;
	opcode_info_t opcode;

	meta_p = &fc_params->meta_buf;
	m_vaddr = meta_p->vaddr;
	m_dma = meta_p->dma_addr;
	m_size = meta_p->size;

	encr_offset = ENCR_OFFSET(d_offs);
	auth_offset = AUTH_OFFSET(d_offs);
	encr_data_len = ENCR_DLEN(d_lens);
	auth_data_len = AUTH_DLEN(d_lens);
	if (unlikely(flags & VALID_AAD_BUF)) {
		/*
		 * We don't support both AAD
		 * and auth data separately
		 */
		auth_data_len = 0;
		auth_offset = 0;
		aad_len = fc_params->aad_buf.size;
		aad_buf = &fc_params->aad_buf;
	}
	cpt_ctx = fc_params->ctx_buf.vaddr;
	cipher_type = cpt_ctx->enc_cipher;
	hash_type = cpt_ctx->hash_type;
	mac_len = cpt_ctx->mac_len;

	/*
	 * Save initial space that followed app data for completion code &
	 * alternate completion code to fall in same cache line as app data
	 */
	m_vaddr = (uint8_t *)m_vaddr + COMPLETION_CODE_SIZE;
	m_dma += COMPLETION_CODE_SIZE;
	size = (uint8_t *)RTE_PTR_ALIGN((uint8_t *)m_vaddr, 16) -
		(uint8_t *)m_vaddr;

	c_vaddr = (uint8_t *)m_vaddr + size;
	c_dma = m_dma + size;
	size += sizeof(cpt_res_s_t);

	m_vaddr = (uint8_t *)m_vaddr + size;
	m_dma += size;
	m_size -= size;

	/* start cpt request info struct at 8 byte boundary */
	size = (uint8_t *)RTE_PTR_ALIGN(m_vaddr, 8) -
		(uint8_t *)m_vaddr;

	req = (struct cpt_request_info *)((uint8_t *)m_vaddr + size);

	size += sizeof(struct cpt_request_info);
	m_vaddr = (uint8_t *)m_vaddr + size;
	m_dma += size;
	m_size -= size;

	if (hash_type == GMAC_TYPE)
		encr_data_len = 0;

	if (unlikely(!(flags & VALID_IV_BUF))) {
		iv_len = 0;
		iv_offset = ENCR_IV_OFFSET(d_offs);
	}
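
	/*
	 * With a separate AAD buffer, the offset arithmetic below lays the
	 * input out as [ pass-through bytes | IV | AAD | cipher data ] for
	 * the non-GMAC case, with authentication covering AAD + cipher
	 * data; for GMAC only the AAD is authenticated.
	 */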
	if (unlikely(flags & VALID_AAD_BUF)) {
		/*
		 * When AAD is given, data above encr_offset is pass through
		 * Since AAD is given as separate pointer and not as offset,
		 * this is a special case as we need to fragment input data
		 * into passthrough + encr_data and then insert AAD in between.
		 */
		if (hash_type != GMAC_TYPE) {
			passthrough_len = encr_offset;
			auth_offset = passthrough_len + iv_len;
			encr_offset = passthrough_len + aad_len + iv_len;
			auth_data_len = aad_len + encr_data_len;
		} else {
			passthrough_len = 16 + aad_len;
			auth_offset = passthrough_len + iv_len;
			auth_data_len = aad_len;
		}
	} else {
		encr_offset += iv_len;
		auth_offset += iv_len;
	}

	/* Encryption */
	opcode.s.major = CPT_MAJOR_OP_FC;
	opcode.s.minor = 0;

	auth_dlen = auth_offset + auth_data_len;
	enc_dlen = encr_data_len + encr_offset;
	if (unlikely(encr_data_len & 0xf)) {
		if ((cipher_type == DES3_CBC) || (cipher_type == DES3_ECB))
			enc_dlen = ROUNDUP8(encr_data_len) + encr_offset;
		else if (likely((cipher_type == AES_CBC) ||
				(cipher_type == AES_ECB)))
			enc_dlen = ROUNDUP16(encr_data_len) + encr_offset;
	}

	if (unlikely(hash_type == GMAC_TYPE)) {
		encr_offset = auth_dlen;
		enc_dlen = 0;
	}

	if (unlikely(auth_dlen > enc_dlen)) {
		inputlen = auth_dlen;
		outputlen = auth_dlen + mac_len;
	} else {
		inputlen = enc_dlen;
		outputlen = enc_dlen + mac_len;
	}

	/* GP op header */
	vq_cmd_w0.u64 = 0;
	vq_cmd_w0.s.param1 = rte_cpu_to_be_16(encr_data_len);
	vq_cmd_w0.s.param2 = rte_cpu_to_be_16(auth_data_len);
	/*
	 * On 83XX the IV & offset control word cannot be part of the
	 * instruction and must live in the data buffer, so do the direct
	 * mode processing only when head room is available.
	 */
	if (likely((flags & SINGLE_BUF_INPLACE) &&
		   (flags & SINGLE_BUF_HEADTAILROOM))) {
		void *dm_vaddr = fc_params->bufs[0].vaddr;
		uint64_t dm_dma_addr = fc_params->bufs[0].dma_addr;
		/*
		 * This flag indicates that there are 24 bytes of head room
		 * and 8 bytes of tail room available, so that we get to do
		 * DIRECT MODE with limitation
		 */

		offset_vaddr = (uint8_t *)dm_vaddr - OFF_CTRL_LEN - iv_len;
		offset_dma = dm_dma_addr - OFF_CTRL_LEN - iv_len;

		/* DPTR */
		req->ist.ei1 = offset_dma;
		/* RPTR should just exclude offset control word */
		req->ist.ei2 = dm_dma_addr - iv_len;
		req->alternate_caddr = (uint64_t *)((uint8_t *)dm_vaddr
						    + outputlen - iv_len);

		vq_cmd_w0.s.dlen = rte_cpu_to_be_16(inputlen + OFF_CTRL_LEN);

		vq_cmd_w0.s.opcode = rte_cpu_to_be_16(opcode.flags);

		if (likely(iv_len)) {
			uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr
						      + OFF_CTRL_LEN);
			uint64_t *src = fc_params->iv_buf;
			dest[0] = src[0];
			dest[1] = src[1];
		}
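
		/*
		 * The 8-byte offset control word that precedes the IV packs,
		 * as the shifts below imply, encr_offset at bit 16,
		 * iv_offset at bit 8 and auth_offset at bit 0, and is
		 * stored big-endian for the microcode.
		 */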
		*(uint64_t *)offset_vaddr =
			rte_cpu_to_be_64(((uint64_t)encr_offset << 16) |
				((uint64_t)iv_offset << 8) |
				((uint64_t)auth_offset));

	} else {
		uint32_t i, g_size_bytes, s_size_bytes;
		uint64_t dptr_dma, rptr_dma;
		sg_comp_t *gather_comp;
		sg_comp_t *scatter_comp;
		uint8_t *in_buffer;

		/* This falls under strict SG mode */
		offset_vaddr = m_vaddr;
		offset_dma = m_dma;
		size = OFF_CTRL_LEN + iv_len;

		m_vaddr = (uint8_t *)m_vaddr + size;
		m_dma += size;
		m_size -= size;

		opcode.s.major |= CPT_DMA_MODE;

		vq_cmd_w0.s.opcode = rte_cpu_to_be_16(opcode.flags);

		if (likely(iv_len)) {
			uint64_t *dest = (uint64_t *)((uint8_t *)offset_vaddr
						      + OFF_CTRL_LEN);
			uint64_t *src = fc_params->iv_buf;
			dest[0] = src[0];
			dest[1] = src[1];
		}

		*(uint64_t *)offset_vaddr =
			rte_cpu_to_be_64(((uint64_t)encr_offset << 16) |
				((uint64_t)iv_offset << 8) |
				((uint64_t)auth_offset));

		/* DPTR has SG list */
		in_buffer = m_vaddr;
		dptr_dma = m_dma;
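
		/*
		 * DPTR points at an 8-byte SG list header: 16-bit words [2]
		 * and [3] carry the gather and scatter component counts in
		 * big-endian (filled in further below), followed by the
		 * gather and then the scatter entries.
		 */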
		((uint16_t *)in_buffer)[0] = 0;
		((uint16_t *)in_buffer)[1] = 0;

		/* TODO Add error check if space will be sufficient */
		gather_comp = (sg_comp_t *)((uint8_t *)m_vaddr + 8);

		/*
		 * Input Gather List
		 */

		i = 0;

		/* Offset control word that includes iv */
		i = fill_sg_comp(gather_comp, i, offset_dma,
				 OFF_CTRL_LEN + iv_len);

		/* Add input data */
		size = inputlen - iv_len;
		if (likely(size)) {
			uint32_t aad_offset = aad_len ? passthrough_len : 0;

			if (unlikely(flags & SINGLE_BUF_INPLACE)) {
				i = fill_sg_comp_from_buf_min(gather_comp, i,
							      fc_params->bufs,
							      &size);
			} else {
				i = fill_sg_comp_from_iov(gather_comp, i,
							  fc_params->src_iov,
							  0, &size,
							  aad_buf, aad_offset);
			}

			if (unlikely(size)) {
				CPT_LOG_DP_ERR("Insufficient buffer space,"
					       " size %d needed", size);
				return ERR_BAD_INPUT_ARG;
			}
		}
		((uint16_t *)in_buffer)[2] = rte_cpu_to_be_16(i);
		g_size_bytes = ((i + 3) / 4) * sizeof(sg_comp_t);

		/*
		 * Output Scatter list
		 */
		i = 0;
		scatter_comp =
			(sg_comp_t *)((uint8_t *)gather_comp + g_size_bytes);

		/* Add IV */
		if (likely(iv_len)) {
			i = fill_sg_comp(scatter_comp, i,
					 offset_dma + OFF_CTRL_LEN,
					 iv_len);
		}

		/* output data or output data + digest */
		if (unlikely(flags & VALID_MAC_BUF)) {
			size = outputlen - iv_len - mac_len;
			if (size) {
				uint32_t aad_offset =
					aad_len ? passthrough_len : 0;

				if (unlikely(flags & SINGLE_BUF_INPLACE)) {
					i = fill_sg_comp_from_buf_min(
							scatter_comp,
							i,
							fc_params->bufs,
							&size);
				} else {
					i = fill_sg_comp_from_iov(scatter_comp,
							i,
							fc_params->dst_iov,
							0,
							&size,
							aad_buf,
							aad_offset);
				}
				if (size)
					return ERR_BAD_INPUT_ARG;
			}
			/* mac_data */
			if (mac_len) {
				i = fill_sg_comp_from_buf(scatter_comp, i,
							  &fc_params->mac_buf);
			}
		} else {
			/* Output including mac */
			size = outputlen - iv_len;
			if (likely(size)) {
				uint32_t aad_offset =
					aad_len ? passthrough_len : 0;

				if (unlikely(flags & SINGLE_BUF_INPLACE)) {
					i = fill_sg_comp_from_buf_min(
							scatter_comp,
							i,
							fc_params->bufs,
							&size);
				} else {
					i = fill_sg_comp_from_iov(scatter_comp,
							i,
							fc_params->dst_iov,
							0,
							&size,
							aad_buf,
							aad_offset);
				}
				if (unlikely(size)) {
					CPT_LOG_DP_ERR("Insufficient buffer"
						       " space, size %d needed",
						       size);
					return ERR_BAD_INPUT_ARG;
				}
			}
		}
		((uint16_t *)in_buffer)[3] = rte_cpu_to_be_16(i);
		s_size_bytes = ((i + 3) / 4) * sizeof(sg_comp_t);

		size = g_size_bytes + s_size_bytes + SG_LIST_HDR_SIZE;

		/* This is the DPTR len in case of SG mode */
		vq_cmd_w0.s.dlen = rte_cpu_to_be_16(size);

		m_vaddr = (uint8_t *)m_vaddr + size;
		m_dma += size;
		m_size -= size;

		/* cpt alternate completion address saved earlier */
		req->alternate_caddr = (uint64_t *)((uint8_t *)c_vaddr - 8);
		*req->alternate_caddr = ~((uint64_t)COMPLETION_CODE_INIT);
		rptr_dma = c_dma - 8;

		req->ist.ei1 = dptr_dma;
		req->ist.ei2 = rptr_dma;
	}

	/* First 16-bit swap then 64-bit swap */
	/* TODO: HACK: Reverse the vq_cmd and cpt_req bit field definitions
	 * to eliminate all the swapping
	 */
	vq_cmd_w0.u64 = rte_cpu_to_be_64(vq_cmd_w0.u64);

	ctx_dma = fc_params->ctx_buf.dma_addr +
		offsetof(struct cpt_ctx, fctx);
	/* vq command w3 */
	vq_cmd_w3.u64 = 0;
	vq_cmd_w3.s.grp = 0;
	vq_cmd_w3.s.cptr = ctx_dma;

	/* 16 byte aligned cpt res address */
	req->completion_addr = (uint64_t *)((uint8_t *)c_vaddr);
	*req->completion_addr = COMPLETION_CODE_INIT;
	req->comp_baddr = c_dma;

	/* Fill microcode part of instruction */
	req->ist.ei0 = vq_cmd_w0.u64;
	req->ist.ei3 = vq_cmd_w3.u64;

	req->op = op;

	*prep_req = req;
	return 0;
}
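
/*
 * Thin dispatcher over the prep routine above: only FC_GEN contexts are
 * handled here (anything else reports ERR_EIO). Returns the prepared
 * request pointer, or NULL with *ret_val holding the error code.
 */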
static __rte_always_inline void *__hot
cpt_fc_enc_hmac_prep(uint32_t flags, uint64_t d_offs, uint64_t d_lens,
		     fc_params_t *fc_params, void *op, int *ret_val)
{
	struct cpt_ctx *ctx = fc_params->ctx_buf.vaddr;
	uint8_t fc_type;
	void *prep_req = NULL;
	int ret;

	fc_type = ctx->fc_type;

	/* Common api for rest of the ops */
	if (likely(fc_type == FC_GEN)) {
		ret = cpt_enc_hmac_prep(flags, d_offs, d_lens,
					fc_params, op, &prep_req);
	} else {
		ret = ERR_EIO;
	}

	if (unlikely(!prep_req))
		*ret_val = ret;
	return prep_req;
}
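
/*
 * Program the auth key into the session context. For the wireless MACs
 * (ZUC EIA3, SNOW3G UIA2, KASUMI F9) a 16-byte key is required and the
 * MAC length is fixed at 4 bytes; for the generic FC/HMAC path the hash
 * type and MAC length are written into the enc ctrl word and, when a key
 * is supplied, it is stored as the HMAC key (copied into opad as well).
 */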
static __rte_always_inline int
cpt_fc_auth_set_key(void *ctx, auth_type_t type, uint8_t *key,
		    uint16_t key_len, uint16_t mac_len)
{
	struct cpt_ctx *cpt_ctx = ctx;
	mc_fc_context_t *fctx = &cpt_ctx->fctx;
	uint64_t *ctrl_flags = NULL;

	if ((type >= ZUC_EIA3) && (type <= KASUMI_F9_ECB)) {
		uint32_t keyx[4];

		if (key_len != 16)
			return -1;
		/* No support for AEAD yet */
		if (cpt_ctx->enc_cipher)
			return -1;
		/* For ZUC/SNOW3G/Kasumi */
		switch (type) {
		case SNOW3G_UIA2:
			cpt_ctx->snow3g = 1;
			gen_key_snow3g(key, keyx);
			memcpy(cpt_ctx->zs_ctx.ci_key, keyx, key_len);
			cpt_ctx->fc_type = ZUC_SNOW3G;
			cpt_ctx->zsk_flags = 0x1;
			break;
		case ZUC_EIA3:
			cpt_ctx->snow3g = 0;
			memcpy(cpt_ctx->zs_ctx.ci_key, key, key_len);
			memcpy(cpt_ctx->zs_ctx.zuc_const, zuc_d, 32);
			cpt_ctx->fc_type = ZUC_SNOW3G;
			cpt_ctx->zsk_flags = 0x1;
			break;
		case KASUMI_F9_ECB:
			/* Kasumi ECB mode */
			cpt_ctx->k_ecb = 1;
			memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
			cpt_ctx->fc_type = KASUMI;
			cpt_ctx->zsk_flags = 0x1;
			break;
		case KASUMI_F9_CBC:
			memcpy(cpt_ctx->k_ctx.ci_key, key, key_len);
			cpt_ctx->fc_type = KASUMI;
			cpt_ctx->zsk_flags = 0x1;
			break;
		default:
			return -1;
		}
		cpt_ctx->mac_len = 4;
		cpt_ctx->hash_type = type;
		return 0;
	}

	if (!(cpt_ctx->fc_type == FC_GEN && !type)) {
		if (!cpt_ctx->fc_type || !cpt_ctx->enc_cipher)
			cpt_ctx->fc_type = HASH_HMAC;
	}

	ctrl_flags = (uint64_t *)&fctx->enc.enc_ctrl.flags;
	*ctrl_flags = rte_be_to_cpu_64(*ctrl_flags);

	/* For GMAC auth, cipher must be NULL */
	if (type == GMAC_TYPE)
		CPT_P_ENC_CTRL(fctx).enc_cipher = 0;

	CPT_P_ENC_CTRL(fctx).hash_type = cpt_ctx->hash_type = type;
	CPT_P_ENC_CTRL(fctx).mac_len = cpt_ctx->mac_len = mac_len;

	if (key_len) {
		cpt_ctx->hmac = 1;
		memset(cpt_ctx->auth_key, 0, sizeof(cpt_ctx->auth_key));
		memcpy(cpt_ctx->auth_key, key, key_len);
		cpt_ctx->auth_key_len = key_len;
		memset(fctx->hmac.ipad, 0, sizeof(fctx->hmac.ipad));
		memset(fctx->hmac.opad, 0, sizeof(fctx->hmac.opad));
		memcpy(fctx->hmac.opad, key, key_len);
		CPT_P_ENC_CTRL(fctx).auth_input_type = 1;
	}
	*ctrl_flags = rte_cpu_to_be_64(*ctrl_flags);
	return 0;
}
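
/*
 * Translate an rte_crypto AEAD xform into session state. Only AES-GCM is
 * handled: encrypt maps to cipher-encrypt + auth-generate and decrypt to
 * cipher-decrypt + auth-verify; the cipher key is then programmed via
 * cpt_fc_ciph_set_key() and the digest length via cpt_fc_auth_set_key().
 */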
static __rte_always_inline int
fill_sess_aead(struct rte_crypto_sym_xform *xform,
		 struct cpt_sess_misc *sess)
{
	struct rte_crypto_aead_xform *aead_form;
	cipher_type_t enc_type = 0; /* NULL Cipher type */
	auth_type_t auth_type = 0; /* NULL Auth type */
	uint32_t cipher_key_len = 0;
	uint8_t zsk_flag = 0, aes_gcm = 0;
	aead_form = &xform->aead;
	void *ctx;

	if (aead_form->op == RTE_CRYPTO_AEAD_OP_ENCRYPT &&
	   aead_form->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		sess->cpt_op |= CPT_OP_CIPHER_ENCRYPT;
		sess->cpt_op |= CPT_OP_AUTH_GENERATE;
	} else if (aead_form->op == RTE_CRYPTO_AEAD_OP_DECRYPT &&
		aead_form->algo == RTE_CRYPTO_AEAD_AES_GCM) {
		sess->cpt_op |= CPT_OP_CIPHER_DECRYPT;
		sess->cpt_op |= CPT_OP_AUTH_VERIFY;
	} else {
		CPT_LOG_DP_ERR("Unknown cipher operation\n");
		return -1;
	}
	switch (aead_form->algo) {
	case RTE_CRYPTO_AEAD_AES_GCM:
		enc_type = AES_GCM;
		cipher_key_len = 16;
		aes_gcm = 1;
		break;
	case RTE_CRYPTO_AEAD_AES_CCM:
		CPT_LOG_DP_ERR("Crypto: Unsupported cipher algo %u",
			       aead_form->algo);
		return -1;
	default:
		CPT_LOG_DP_ERR("Crypto: Undefined cipher algo %u specified",
			       aead_form->algo);
		return -1;
	}
	if (aead_form->key.length < cipher_key_len) {
		CPT_LOG_DP_ERR("Invalid cipher params keylen %lu",
			       (unsigned long)aead_form->key.length);
		return -1;
	}
	sess->zsk_flag = zsk_flag;
	sess->aes_gcm = aes_gcm;
	sess->mac_len = aead_form->digest_length;
	sess->iv_offset = aead_form->iv.offset;
	sess->iv_length = aead_form->iv.length;
	sess->aad_length = aead_form->aad_length;
	ctx = (void *)((uint8_t *)sess + sizeof(struct cpt_sess_misc));

	cpt_fc_ciph_set_key(ctx, enc_type, aead_form->key.data,
			aead_form->key.length, NULL);

	cpt_fc_auth_set_key(ctx, auth_type, NULL, 0, aead_form->digest_length);

	return 0;
}

static __rte_always_inline int
fill_sess_cipher(struct rte_crypto_sym_xform *xform,
		 struct cpt_sess_misc *sess)
{
	struct rte_crypto_cipher_xform *c_form;
	cipher_type_t enc_type = 0; /* NULL Cipher type */
	uint32_t cipher_key_len = 0;
	uint8_t zsk_flag = 0, aes_gcm = 0, aes_ctr = 0, is_null = 0;

	if (xform->type != RTE_CRYPTO_SYM_XFORM_CIPHER)
		return -1;

	c_form = &xform->cipher;

	if (c_form->op == RTE_CRYPTO_CIPHER_OP_ENCRYPT)
		sess->cpt_op |= CPT_OP_CIPHER_ENCRYPT;
	else if (c_form->op == RTE_CRYPTO_CIPHER_OP_DECRYPT)
		sess->cpt_op |= CPT_OP_CIPHER_DECRYPT;
	else {
		CPT_LOG_DP_ERR("Unknown cipher operation\n");
		return -1;
	}

	switch (c_form->algo) {
	case RTE_CRYPTO_CIPHER_AES_CBC:
		enc_type = AES_CBC;
		cipher_key_len = 16;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CBC:
		enc_type = DES3_CBC;
		cipher_key_len = 24;
		break;
	case RTE_CRYPTO_CIPHER_DES_CBC:
		/* DES is implemented using 3DES in hardware */
		enc_type = DES3_CBC;
		cipher_key_len = 8;
		break;
	case RTE_CRYPTO_CIPHER_AES_CTR:
		enc_type = AES_CTR;
		cipher_key_len = 16;
		aes_ctr = 1;
		break;
	case RTE_CRYPTO_CIPHER_NULL:
		enc_type = 0;
		is_null = 1;
		break;
	case RTE_CRYPTO_CIPHER_KASUMI_F8:
		enc_type = KASUMI_F8_ECB;
		cipher_key_len = 16;
		cipher_key_len = 16;
		zsk_flag = K_F8;
		break;
	case RTE_CRYPTO_CIPHER_SNOW3G_UEA2:
		enc_type = SNOW3G_UEA2;
		cipher_key_len = 16;
		zsk_flag = ZS_EA;
		break;
	case RTE_CRYPTO_CIPHER_ZUC_EEA3:
		enc_type = ZUC_EEA3;
		cipher_key_len = 16;
		zsk_flag = ZS_EA;
		break;
	case RTE_CRYPTO_CIPHER_AES_XTS:
		enc_type = AES_XTS;
		cipher_key_len = 16;
		break;
	case RTE_CRYPTO_CIPHER_3DES_ECB:
		enc_type = DES3_ECB;
		cipher_key_len = 24;
		break;
	case RTE_CRYPTO_CIPHER_AES_ECB:
		enc_type = AES_ECB;
		cipher_key_len = 16;
		break;
	case RTE_CRYPTO_CIPHER_3DES_CTR:
	case RTE_CRYPTO_CIPHER_AES_F8:
	case RTE_CRYPTO_CIPHER_ARC4:
		CPT_LOG_DP_ERR("Crypto: Unsupported cipher algo %u",
			       c_form->algo);
		return -1;
	default:
		CPT_LOG_DP_ERR("Crypto: Undefined cipher algo %u specified",
			       c_form->algo);
		return -1;
	}

	if (c_form->key.length < cipher_key_len) {
		CPT_LOG_DP_ERR("Invalid cipher params keylen %lu",
			       (unsigned long)c_form->key.length);
		return -1;
	}

	sess->zsk_flag = zsk_flag;
	sess->aes_gcm = aes_gcm;
	sess->aes_ctr = aes_ctr;
	sess->iv_offset = c_form->iv.offset;
	sess->iv_length = c_form->iv.length;
	sess->is_null = is_null;

	cpt_fc_ciph_set_key(SESS_PRIV(sess), enc_type, c_form->key.data,
			    c_form->key.length, NULL);

	return 0;
}

static __rte_always_inline int
fill_sess_auth(struct rte_crypto_sym_xform *xform,
	       struct cpt_sess_misc *sess)
{
	struct rte_crypto_auth_xform *a_form;
	auth_type_t auth_type = 0; /* NULL Auth type */
	uint8_t zsk_flag = 0, aes_gcm = 0, is_null = 0;

	if (xform->type != RTE_CRYPTO_SYM_XFORM_AUTH)
		goto error_out;

	a_form = &xform->auth;

	if (a_form->op == RTE_CRYPTO_AUTH_OP_VERIFY)
		sess->cpt_op |= CPT_OP_AUTH_VERIFY;
	else if (a_form->op == RTE_CRYPTO_AUTH_OP_GENERATE)
		sess->cpt_op |= CPT_OP_AUTH_GENERATE;
	else {
		CPT_LOG_DP_ERR("Unknown auth operation");
		return -1;
	}

	if (a_form->key.length > 64) {
		CPT_LOG_DP_ERR("Auth key length is too long");
		return -1;
	}

	switch (a_form->algo) {
	case RTE_CRYPTO_AUTH_SHA1_HMAC:
		/* Fall through */
	case RTE_CRYPTO_AUTH_SHA1:
		auth_type = SHA1_TYPE;
		break;
	case RTE_CRYPTO_AUTH_SHA256_HMAC:
	case RTE_CRYPTO_AUTH_SHA256:
		auth_type = SHA2_SHA256;
		break;
	case RTE_CRYPTO_AUTH_SHA512_HMAC:
	case RTE_CRYPTO_AUTH_SHA512:
		auth_type = SHA2_SHA512;
		break;
	case RTE_CRYPTO_AUTH_AES_GMAC:
		auth_type = GMAC_TYPE;
		aes_gcm = 1;
		break;
	case RTE_CRYPTO_AUTH_SHA224_HMAC:
	case RTE_CRYPTO_AUTH_SHA224:
		auth_type = SHA2_SHA224;
		break;
	case RTE_CRYPTO_AUTH_SHA384_HMAC:
	case RTE_CRYPTO_AUTH_SHA384:
		auth_type = SHA2_SHA384;
		break;
	case RTE_CRYPTO_AUTH_MD5_HMAC:
	case RTE_CRYPTO_AUTH_MD5:
		auth_type = MD5_TYPE;
		break;
	case RTE_CRYPTO_AUTH_KASUMI_F9:
		auth_type = KASUMI_F9_ECB;
		/*
		 * Indicate that the direction needs to be taken from
		 * the end of src
		 */
		zsk_flag = K_F9;
		break;
	case RTE_CRYPTO_AUTH_SNOW3G_UIA2:
		auth_type = SNOW3G_UIA2;
		zsk_flag = ZS_IA;
		break;
	case RTE_CRYPTO_AUTH_ZUC_EIA3:
		auth_type = ZUC_EIA3;
		zsk_flag = ZS_IA;
		break;
	case RTE_CRYPTO_AUTH_NULL:
		auth_type = 0;
		is_null = 1;
		break;
	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
	case RTE_CRYPTO_AUTH_AES_CMAC:
	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
		CPT_LOG_DP_ERR("Crypto: Unsupported hash algo %u",
			       a_form->algo);
		goto error_out;
	default:
		CPT_LOG_DP_ERR("Crypto: Undefined Hash algo %u specified",
			       a_form->algo);
		goto error_out;
	}

	sess->zsk_flag = zsk_flag;
	sess->aes_gcm = aes_gcm;
	sess->mac_len = a_form->digest_length;
	sess->is_null = is_null;
	if (zsk_flag) {
		sess->auth_iv_offset = a_form->iv.offset;
		sess->auth_iv_length = a_form->iv.length;
	}
	cpt_fc_auth_set_key(SESS_PRIV(sess), auth_type, a_form->key.data,
			    a_form->key.length, a_form->digest_length);

	return 0;

error_out:
	return -1;
}

static __rte_always_inline int
fill_sess_gmac(struct rte_crypto_sym_xform *xform,
	       struct cpt_sess_misc *sess)
{
	struct rte_crypto_auth_xform *a_form;
	cipher_type_t enc_type = 0; /* NULL Cipher type */
	auth_type_t auth_type = 0; /* NULL Auth type */
	uint8_t zsk_flag = 0, aes_gcm = 0;
	void *ctx;

	if (xform->type != RTE_CRYPTO_SYM_XFORM_AUTH)
		return -1;

	a_form = &xform->auth;

	if (a_form->op == RTE_CRYPTO_AUTH_OP_GENERATE)
		sess->cpt_op |= CPT_OP_ENCODE;
	else if (a_form->op == RTE_CRYPTO_AUTH_OP_VERIFY)
		sess->cpt_op |= CPT_OP_DECODE;
	else {
		CPT_LOG_DP_ERR("Unknown auth operation");
		return -1;
	}

	switch (a_form->algo) {
	case RTE_CRYPTO_AUTH_AES_GMAC:
		enc_type = AES_GCM;
		auth_type = GMAC_TYPE;
		break;
	default:
		CPT_LOG_DP_ERR("Crypto: Undefined cipher algo %u specified",
			       a_form->algo);
		return -1;
	}

	sess->zsk_flag = zsk_flag;
	sess->aes_gcm = aes_gcm;
	sess->is_gmac = 1;
	sess->iv_offset = a_form->iv.offset;
	sess->iv_length = a_form->iv.length;
	sess->mac_len = a_form->digest_length;
	ctx = (void *)((uint8_t *)sess + sizeof(struct cpt_sess_misc));

	cpt_fc_ciph_set_key(ctx, enc_type, a_form->key.data,
			    a_form->key.length, NULL);
	cpt_fc_auth_set_key(ctx, auth_type, NULL, 0, a_form->digest_length);

	return 0;
}

static __rte_always_inline void *
alloc_op_meta(struct rte_mbuf *m_src,
	      buf_ptr_t *buf,
	      int32_t len,
	      struct rte_mempool *cpt_meta_pool)
{
	uint8_t *mdata;

#ifndef CPT_ALWAYS_USE_SEPARATE_BUF
	if (likely(m_src && (m_src->nb_segs == 1))) {
		int32_t tailroom;
		phys_addr_t mphys;

		/* Check if tailroom is sufficient to hold meta data */
		tailroom = rte_pktmbuf_tailroom(m_src);
		if (likely(tailroom > len + 8)) {
			mdata = (uint8_t *)m_src->buf_addr + m_src->buf_len;
			mphys = m_src->buf_physaddr + m_src->buf_len;
			mdata -= len;
			mphys -= len;
			buf->vaddr = mdata;
			buf->dma_addr = mphys;
			buf->size = len;
			/* Indicate that this is an mbuf allocated mdata */
			mdata = (uint8_t *)((uint64_t)mdata | 1ull);
			return mdata;
		}
	}
#else
	RTE_SET_USED(m_src);
#endif

	if (unlikely(rte_mempool_get(cpt_meta_pool, (void **)&mdata) < 0))
		return NULL;

	buf->vaddr = mdata;
	buf->dma_addr = rte_mempool_virt2iova(mdata);
	buf->size = len;

	return mdata;
}
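
/*
 * Bit 0 of the pointer returned by alloc_op_meta() is used as a tag: it is
 * set when the meta data was carved out of the source mbuf's tailroom, in
 * which case free_op_meta() below must not return it to the mempool.
 * Illustrative pairing from a hypothetical caller (sketch only; meta_len and
 * meta_pool are placeholder names, not driver symbols):
 *
 *	buf_ptr_t meta;
 *	void *mdata = alloc_op_meta(m_src, &meta, meta_len, meta_pool);
 *
 *	if (mdata == NULL)
 *		return -ENOMEM;
 *	(build and submit the request using meta.vaddr / meta.dma_addr)
 *	free_op_meta(mdata, meta_pool);
 */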

/**
 * free_op_meta - free meta buffer to mempool.
 * @param mdata: pointer to the meta buffer, as returned by alloc_op_meta().
 * @param cpt_meta_pool: mempool the buffer was allocated from.
 */
static __rte_always_inline void
free_op_meta(void *mdata, struct rte_mempool *cpt_meta_pool)
{
	bool nofree = ((uintptr_t)mdata & 1ull);

	if (likely(nofree))
		return;
	rte_mempool_put(cpt_meta_pool, mdata);
}

static __rte_always_inline uint32_t
prepare_iov_from_pkt(struct rte_mbuf *pkt,
		     iov_ptr_t *iovec, uint32_t start_offset)
{
	uint16_t index = 0;
	void *seg_data = NULL;
	phys_addr_t seg_phys;
	int32_t seg_size = 0;

	if (!pkt) {
		iovec->buf_cnt = 0;
		return 0;
	}

	if (!start_offset) {
		seg_data = rte_pktmbuf_mtod(pkt, void *);
		seg_phys = rte_pktmbuf_mtophys(pkt);
		seg_size = pkt->data_len;
	} else {
		while (start_offset >= pkt->data_len) {
			start_offset -= pkt->data_len;
			pkt = pkt->next;
		}

		seg_data = rte_pktmbuf_mtod_offset(pkt, void *, start_offset);
		seg_phys = rte_pktmbuf_mtophys_offset(pkt, start_offset);
		seg_size = pkt->data_len - start_offset;
		if (!seg_size)
			return 1;
	}

	/* first seg */
	iovec->bufs[index].vaddr = seg_data;
	iovec->bufs[index].dma_addr = seg_phys;
	iovec->bufs[index].size = seg_size;
	index++;
	pkt = pkt->next;

	while (unlikely(pkt != NULL)) {
		seg_data = rte_pktmbuf_mtod(pkt, void *);
		seg_phys = rte_pktmbuf_mtophys(pkt);
		seg_size = pkt->data_len;
		if (!seg_size)
			break;

		iovec->bufs[index].vaddr = seg_data;
		iovec->bufs[index].dma_addr = seg_phys;
		iovec->bufs[index].size = seg_size;

		index++;

		pkt = pkt->next;
	}

	iovec->buf_cnt = index;
	return 0;
}

static __rte_always_inline uint32_t
prepare_iov_from_pkt_inplace(struct rte_mbuf *pkt,
			     fc_params_t *param,
			     uint32_t *flags)
{
	uint16_t index = 0;
	void *seg_data = NULL;
	phys_addr_t seg_phys;
	uint32_t seg_size = 0;
	iov_ptr_t *iovec;

	seg_data = rte_pktmbuf_mtod(pkt, void *);
	seg_phys = rte_pktmbuf_mtophys(pkt);
	seg_size = pkt->data_len;

	/* first seg */
	if (likely(!pkt->next)) {
		uint32_t headroom, tailroom;

		*flags |= SINGLE_BUF_INPLACE;
		headroom = rte_pktmbuf_headroom(pkt);
		tailroom = rte_pktmbuf_tailroom(pkt);
		if (likely((headroom >= 24) &&
		    (tailroom >= 8))) {
			/* On 83XX this is a prerequisite for Direct mode */
			*flags |= SINGLE_BUF_HEADTAILROOM;
		}
		param->bufs[0].vaddr = seg_data;
		param->bufs[0].dma_addr = seg_phys;
		param->bufs[0].size = seg_size;
		return 0;
	}
	iovec = param->src_iov;
	iovec->bufs[index].vaddr = seg_data;
	iovec->bufs[index].dma_addr = seg_phys;
	iovec->bufs[index].size = seg_size;
	index++;
	pkt = pkt->next;

	while (unlikely(pkt != NULL)) {
		seg_data = rte_pktmbuf_mtod(pkt, void *);
		seg_phys = rte_pktmbuf_mtophys(pkt);
		seg_size = pkt->data_len;

		if (!seg_size)
			break;

		iovec->bufs[index].vaddr = seg_data;
		iovec->bufs[index].dma_addr = seg_phys;
		iovec->bufs[index].size = seg_size;

		index++;

		pkt = pkt->next;
	}

	iovec->buf_cnt = index;
	return 0;
}
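
/*
 * Usage sketch (illustrative only, not used by the driver): fill_fc_params()
 * below backs its iov_ptr_t gather lists with plain character arrays of
 * SRC_IOV_SIZE bytes; a caller wiring up src/dst lists by hand could do the
 * same. The helper name prepare_src_dst_iov() is hypothetical.
 *
 *	static int
 *	prepare_src_dst_iov(struct rte_mbuf *m_src, struct rte_mbuf *m_dst,
 *			    fc_params_t *params, char *src_buf, char *dst_buf)
 *	{
 *		params->src_iov = (void *)src_buf;
 *		params->dst_iov = (void *)dst_buf;
 *
 *		if (prepare_iov_from_pkt(m_src, params->src_iov, 0))
 *			return -1;
 *		if (m_dst && prepare_iov_from_pkt(m_dst, params->dst_iov, 0))
 *			return -1;
 *		return 0;
 *	}
 */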

static __rte_always_inline void *
fill_fc_params(struct rte_crypto_op *cop,
	       struct cpt_sess_misc *sess_misc,
	       void **mdata_ptr,
	       int *op_ret)
{
	uint32_t space = 0;
	struct rte_crypto_sym_op *sym_op = cop->sym;
	void *mdata;
	uintptr_t *op;
	uint32_t mc_hash_off;
	uint32_t flags = 0;
	uint64_t d_offs, d_lens;
	void *prep_req = NULL;
	struct rte_mbuf *m_src, *m_dst;
	uint8_t cpt_op = sess_misc->cpt_op;
	uint8_t zsk_flag = sess_misc->zsk_flag;
	uint8_t aes_gcm = sess_misc->aes_gcm;
	uint16_t mac_len = sess_misc->mac_len;
#ifdef CPT_ALWAYS_USE_SG_MODE
	uint8_t inplace = 0;
#else
	uint8_t inplace = 1;
#endif
	fc_params_t fc_params;
	char src[SRC_IOV_SIZE];
	char dst[SRC_IOV_SIZE];
	uint32_t iv_buf[4];
	struct cptvf_meta_info *cpt_m_info =
				(struct cptvf_meta_info *)(*mdata_ptr);

	if (likely(sess_misc->iv_length)) {
		flags |= VALID_IV_BUF;
		fc_params.iv_buf = rte_crypto_op_ctod_offset(cop,
				   uint8_t *, sess_misc->iv_offset);
		if (sess_misc->aes_ctr &&
		    unlikely(sess_misc->iv_length != 16)) {
			/* Expand the 12-byte IV to a 16-byte counter block */
			memcpy((uint8_t *)iv_buf,
			       rte_crypto_op_ctod_offset(cop,
					uint8_t *, sess_misc->iv_offset), 12);
			iv_buf[3] = rte_cpu_to_be_32(0x1);
			fc_params.iv_buf = iv_buf;
		}
	}

	if (zsk_flag) {
		fc_params.auth_iv_buf = rte_crypto_op_ctod_offset(cop,
					uint8_t *,
					sess_misc->auth_iv_offset);
		if (zsk_flag == K_F9) {
			CPT_LOG_DP_ERR("Should not reach here for KASUMI F9");
		}
		if (zsk_flag != ZS_EA)
			inplace = 0;
	}
	m_src = sym_op->m_src;
	m_dst = sym_op->m_dst;

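	/*
	 * d_offs/d_lens pack two ranges into one 64-bit word each: the
	 * cipher (or AEAD data) offset/length is shifted into the high part
	 * (<< 16 and << 32 respectively) and the auth offset/length sits in
	 * the low part. For GCM with the AAD laid out right in front of the
	 * data, the auth range is simply widened to cover the AAD.
	 */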
	if (aes_gcm) {
		uint8_t *salt;
		uint8_t *aad_data;
		uint16_t aad_len;

		d_offs = sym_op->aead.data.offset;
		d_lens = sym_op->aead.data.length;
		mc_hash_off = sym_op->aead.data.offset +
			      sym_op->aead.data.length;

		aad_data = sym_op->aead.aad.data;
		aad_len = sess_misc->aad_length;
		if (likely((aad_data + aad_len) ==
			   rte_pktmbuf_mtod_offset(m_src, uint8_t *,
						   sym_op->aead.data.offset))) {
			d_offs = (d_offs - aad_len) | (d_offs << 16);
			d_lens = (d_lens + aad_len) | (d_lens << 32);
		} else {
			fc_params.aad_buf.vaddr = sym_op->aead.aad.data;
			fc_params.aad_buf.dma_addr = sym_op->aead.aad.phys_addr;
			fc_params.aad_buf.size = aad_len;
			flags |= VALID_AAD_BUF;
			inplace = 0;
			d_offs = d_offs << 16;
			d_lens = d_lens << 32;
		}

		salt = fc_params.iv_buf;
		if (unlikely(*(uint32_t *)salt != sess_misc->salt)) {
			cpt_fc_salt_update(SESS_PRIV(sess_misc), salt);
			sess_misc->salt = *(uint32_t *)salt;
		}
		fc_params.iv_buf = salt + 4;
		if (likely(mac_len)) {
			struct rte_mbuf *m = (cpt_op & CPT_OP_ENCODE) ? m_dst :
					     m_src;
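
			/*
			 * On encrypt the digest is written to the dst mbuf
			 * (m_src for in-place operations); on decrypt it is
			 * read from the src mbuf. The check below detects a
			 * digest that is not contiguous with the data, in
			 * which case a separate MAC buffer is passed.
			 */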
			if (!m)
				m = m_src;

			/* hmac immediately following data is best case */
			if (unlikely(rte_pktmbuf_mtod(m, uint8_t *) +
				     mc_hash_off !=
				     (uint8_t *)sym_op->aead.digest.data)) {
				flags |= VALID_MAC_BUF;
				fc_params.mac_buf.size = sess_misc->mac_len;
				fc_params.mac_buf.vaddr =
					sym_op->aead.digest.data;
				fc_params.mac_buf.dma_addr =
					sym_op->aead.digest.phys_addr;
				inplace = 0;
			}
		}
	} else {
		d_offs = sym_op->cipher.data.offset;
		d_lens = sym_op->cipher.data.length;
		mc_hash_off = sym_op->cipher.data.offset +
			      sym_op->cipher.data.length;
		d_offs = (d_offs << 16) | sym_op->auth.data.offset;
		d_lens = (d_lens << 32) | sym_op->auth.data.length;

		if (mc_hash_off < (sym_op->auth.data.offset +
				   sym_op->auth.data.length)) {
			mc_hash_off = (sym_op->auth.data.offset +
				       sym_op->auth.data.length);
		}
		/* For GMAC, the salt should be updated in the same way as
		 * for GCM
		 */
		if (unlikely(sess_misc->is_gmac)) {
			uint8_t *salt;

			salt = fc_params.iv_buf;
			if (unlikely(*(uint32_t *)salt != sess_misc->salt)) {
				cpt_fc_salt_update(SESS_PRIV(sess_misc), salt);
				sess_misc->salt = *(uint32_t *)salt;
			}
			fc_params.iv_buf = salt + 4;
		}
		if (likely(mac_len)) {
			struct rte_mbuf *m;

			m = (cpt_op & CPT_OP_ENCODE) ? m_dst : m_src;
			if (!m)
				m = m_src;

			/* hmac immediately following data is best case */
			if (unlikely(rte_pktmbuf_mtod(m, uint8_t *) +
				     mc_hash_off !=
				     (uint8_t *)sym_op->auth.digest.data)) {
				flags |= VALID_MAC_BUF;
				fc_params.mac_buf.size =
					sess_misc->mac_len;
				fc_params.mac_buf.vaddr =
					sym_op->auth.digest.data;
				fc_params.mac_buf.dma_addr =
					sym_op->auth.digest.phys_addr;
				inplace = 0;
			}
		}
	}
	fc_params.ctx_buf.vaddr = SESS_PRIV(sess_misc);
	fc_params.ctx_buf.dma_addr = sess_misc->ctx_dma_addr;

	if (unlikely(sess_misc->is_null || sess_misc->cpt_op == CPT_OP_DECODE))
		inplace = 0;

	if (likely(!m_dst && inplace)) {
		/* Single buffer, in-place case: no separate AAD or MAC
		 * buffer and not a wireless (air) crypto algorithm.
		 */
		fc_params.dst_iov = fc_params.src_iov = (void *)src;

		if (unlikely(prepare_iov_from_pkt_inplace(m_src,
							  &fc_params,
							  &flags))) {
			CPT_LOG_DP_ERR("Prepare inplace src iov failed");
			*op_ret = -1;
			return NULL;
		}

	} else {
		/* Out of place processing */
		fc_params.src_iov = (void *)src;
		fc_params.dst_iov = (void *)dst;

		/* Store SG I/O in the API for reuse */
		if (prepare_iov_from_pkt(m_src, fc_params.src_iov, 0)) {
			CPT_LOG_DP_ERR("Prepare src iov failed");
			*op_ret = -1;
			return NULL;
		}

		if (unlikely(m_dst != NULL)) {
			uint32_t pkt_len;

			/* Try to make as much room as src has */
			m_dst = sym_op->m_dst;
			pkt_len = rte_pktmbuf_pkt_len(m_dst);

			if (unlikely(pkt_len < rte_pktmbuf_pkt_len(m_src))) {
				pkt_len = rte_pktmbuf_pkt_len(m_src) - pkt_len;
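				/* Grow m_dst by the shortfall;
				 * rte_pktmbuf_append() returns NULL when the
				 * tailroom of the last segment cannot hold it.
				 */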
				if (!rte_pktmbuf_append(m_dst, pkt_len)) {
					CPT_LOG_DP_ERR("Not enough space in "
						       "m_dst %p, need %u more",
						       m_dst, pkt_len);
					return NULL;
				}
			}

			if (prepare_iov_from_pkt(m_dst, fc_params.dst_iov, 0)) {
				CPT_LOG_DP_ERR("Prepare dst iov failed for "
					       "m_dst %p", m_dst);
				return NULL;
			}
		} else {
			fc_params.dst_iov = (void *)src;
		}
	}

	if (likely(flags & SINGLE_BUF_HEADTAILROOM))
		mdata = alloc_op_meta(m_src,
				      &fc_params.meta_buf,
				      cpt_m_info->cptvf_op_sb_mlen,
				      cpt_m_info->cptvf_meta_pool);
	else
		mdata = alloc_op_meta(NULL,
				      &fc_params.meta_buf,
				      cpt_m_info->cptvf_op_mlen,
				      cpt_m_info->cptvf_meta_pool);

	if (unlikely(mdata == NULL)) {
		CPT_LOG_DP_ERR("Error allocating meta buffer for request");
		return NULL;
	}

	op = (uintptr_t *)((uintptr_t)mdata & (uintptr_t)~1ull);
	op[0] = (uintptr_t)mdata;
	op[1] = (uintptr_t)cop;
	op[2] = op[3] = 0; /* Used to indicate auth verify */
	space += 4 * sizeof(uint64_t);

	fc_params.meta_buf.vaddr = (uint8_t *)op + space;
	fc_params.meta_buf.dma_addr += space;
	fc_params.meta_buf.size -= space;

	/* Finally prepare the instruction */
	if (cpt_op & CPT_OP_ENCODE)
		prep_req = cpt_fc_enc_hmac_prep(flags, d_offs, d_lens,
						&fc_params, op, op_ret);

	if (unlikely(!prep_req))
		free_op_meta(mdata, cpt_m_info->cptvf_meta_pool);
	*mdata_ptr = mdata;
	return prep_req;
}

#endif /* _CPT_UCODE_H_ */