/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2024 Intel Corporation
 */

#include <rte_cryptodev.h>
#include <cryptodev_pmd.h>
#include "qat_sym_session.h"
#include "qat_sym.h"
#include "qat_asym.h"
#include "qat_crypto.h"
#include "qat_crypto_pmd_gens.h"
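
/*
 * Legacy symmetric crypto capabilities, copied into the capability array
 * only when legacy algorithm support is enabled (options.legacy_alg).
 */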
static struct rte_cryptodev_capabilities qat_sym_crypto_legacy_caps_gen5[] = {
	QAT_SYM_PLAIN_AUTH_CAP(SHA1,
		CAP_SET(block_size, 64),
		CAP_RNG(digest_size, 1, 20, 1)),
	QAT_SYM_AUTH_CAP(SHA224,
		CAP_SET(block_size, 64),
		CAP_RNG_ZERO(key_size), CAP_RNG(digest_size, 1, 28, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(SHA224_HMAC,
		CAP_SET(block_size, 64),
		CAP_RNG(key_size, 1, 64, 1), CAP_RNG(digest_size, 1, 28, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(SHA1_HMAC,
		CAP_SET(block_size, 64),
		CAP_RNG(key_size, 1, 64, 1), CAP_RNG(digest_size, 1, 20, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_CIPHER_CAP(SM4_ECB,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(iv_size, 0, 0, 0)),
};
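
/* Default GEN5 symmetric crypto capabilities. */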
static struct rte_cryptodev_capabilities qat_sym_crypto_caps_gen5[] = {
	QAT_SYM_CIPHER_CAP(AES_CBC,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 32, 8), CAP_RNG(iv_size, 16, 16, 0)),
	QAT_SYM_AUTH_CAP(SHA256_HMAC,
		CAP_SET(block_size, 64),
		CAP_RNG(key_size, 1, 64, 1), CAP_RNG(digest_size, 1, 32, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(SHA384_HMAC,
		CAP_SET(block_size, 128),
		CAP_RNG(key_size, 1, 128, 1), CAP_RNG(digest_size, 1, 48, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(SHA512_HMAC,
		CAP_SET(block_size, 128),
		CAP_RNG(key_size, 1, 128, 1), CAP_RNG(digest_size, 1, 64, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(AES_XCBC_MAC,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(digest_size, 12, 12, 0),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(AES_CMAC,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(digest_size, 4, 16, 4),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_CIPHER_CAP(AES_DOCSISBPI,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 32, 16), CAP_RNG(iv_size, 16, 16, 0)),
	QAT_SYM_AUTH_CAP(NULL,
		CAP_SET(block_size, 1),
		CAP_RNG_ZERO(key_size), CAP_RNG_ZERO(digest_size),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_CIPHER_CAP(NULL,
		CAP_SET(block_size, 1),
		CAP_RNG_ZERO(key_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(SHA256,
		CAP_SET(block_size, 64),
		CAP_RNG_ZERO(key_size), CAP_RNG(digest_size, 1, 32, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(SHA384,
		CAP_SET(block_size, 128),
		CAP_RNG_ZERO(key_size), CAP_RNG(digest_size, 1, 48, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_AUTH_CAP(SHA512,
		CAP_SET(block_size, 128),
		CAP_RNG_ZERO(key_size), CAP_RNG(digest_size, 1, 64, 1),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_CIPHER_CAP(AES_CTR,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 32, 8), CAP_RNG(iv_size, 16, 16, 0)),
	QAT_SYM_AEAD_CAP(AES_GCM,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 32, 8), CAP_RNG(digest_size, 8, 16, 4),
		CAP_RNG(aad_size, 0, 240, 1), CAP_RNG(iv_size, 0, 12, 12)),
	QAT_SYM_AEAD_CAP(AES_CCM,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(digest_size, 4, 16, 2),
		CAP_RNG(aad_size, 0, 224, 1), CAP_RNG(iv_size, 7, 13, 1)),
	QAT_SYM_AUTH_CAP(AES_GMAC,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 32, 8), CAP_RNG(digest_size, 8, 16, 4),
		CAP_RNG_ZERO(aad_size), CAP_RNG(iv_size, 0, 12, 12)),
	QAT_SYM_AEAD_CAP(CHACHA20_POLY1305,
		CAP_SET(block_size, 64),
		CAP_RNG(key_size, 32, 32, 0),
		CAP_RNG(digest_size, 16, 16, 0),
		CAP_RNG(aad_size, 0, 240, 1), CAP_RNG(iv_size, 12, 12, 0)),
	QAT_SYM_CIPHER_CAP(SM4_CBC,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(iv_size, 16, 16, 0)),
	QAT_SYM_CIPHER_CAP(SM4_CTR,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(iv_size, 16, 16, 0)),
	QAT_SYM_PLAIN_AUTH_CAP(SM3,
		CAP_SET(block_size, 64),
		CAP_RNG(digest_size, 32, 32, 0)),
	QAT_SYM_AUTH_CAP(SM3_HMAC,
		CAP_SET(block_size, 64),
		CAP_RNG(key_size, 16, 64, 4), CAP_RNG(digest_size, 32, 32, 0),
		CAP_RNG_ZERO(aad_size), CAP_RNG_ZERO(iv_size)),
	QAT_SYM_CIPHER_CAP(ZUC_EEA3,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 32, 16), CAP_RNG(iv_size, 16, 25, 1)),
	QAT_SYM_AUTH_CAP(ZUC_EIA3,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 32, 16), CAP_RNG(digest_size, 4, 16, 4),
		CAP_RNG_ZERO(aad_size), CAP_RNG(iv_size, 16, 25, 1)),
	QAT_SYM_CIPHER_CAP(SNOW3G_UEA2,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(iv_size, 16, 16, 0)),
	QAT_SYM_AUTH_CAP(SNOW3G_UIA2,
		CAP_SET(block_size, 16),
		CAP_RNG(key_size, 16, 16, 0), CAP_RNG(digest_size, 4, 4, 0),
		CAP_RNG_ZERO(aad_size), CAP_RNG(iv_size, 16, 16, 0)),
	RTE_CRYPTODEV_END_OF_CAPABILITIES_LIST()
};
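
/* Return 1 if the capability entry describes the given cipher algorithm. */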
static int
check_cipher_capa(const struct rte_cryptodev_capabilities *cap,
		enum rte_crypto_cipher_algorithm algo)
{
	if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
		return 0;
	if (cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_CIPHER)
		return 0;
	if (cap->sym.cipher.algo != algo)
		return 0;
	return 1;
}
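
/* Return 1 if the capability entry describes the given auth algorithm. */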
static int
check_auth_capa(const struct rte_cryptodev_capabilities *cap,
		enum rte_crypto_auth_algorithm algo)
{
	if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
		return 0;
	if (cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH)
		return 0;
	if (cap->sym.auth.algo != algo)
		return 0;
	return 1;
}
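
/*
 * Build the GEN5 capability array in a memzone. Legacy capabilities are
 * copied first when legacy algorithms are enabled; ZUC EEA3/EIA3 entries
 * are skipped when ICP_ACCEL_MASK_ZUC_256_SLICE is set in the slice map.
 */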
static int
qat_sym_crypto_cap_get_gen5(struct qat_cryptodev_private *internals,
			const char *capa_memz_name,
			const uint16_t slice_map)
{
	uint32_t legacy_capa_num, capa_num;
	uint32_t size = sizeof(qat_sym_crypto_caps_gen5);
	uint32_t legacy_size = sizeof(qat_sym_crypto_legacy_caps_gen5);
	uint32_t i, iter = 0;
	uint32_t curr_capa = 0;

	legacy_capa_num = legacy_size / sizeof(struct rte_cryptodev_capabilities);
	capa_num = RTE_DIM(qat_sym_crypto_caps_gen5);

	if (unlikely(internals->qat_dev->options.legacy_alg))
		size = size + legacy_size;

	internals->capa_mz = rte_memzone_lookup(capa_memz_name);
	if (internals->capa_mz == NULL) {
		internals->capa_mz = rte_memzone_reserve(capa_memz_name,
				size, rte_socket_id(), 0);
		if (internals->capa_mz == NULL) {
			QAT_LOG(DEBUG,
				"Error allocating memzone for capabilities");
			return -1;
		}
	}

	struct rte_cryptodev_capabilities *addr =
			(struct rte_cryptodev_capabilities *)
			internals->capa_mz->addr;

	struct rte_cryptodev_capabilities *capabilities;

	if (unlikely(internals->qat_dev->options.legacy_alg)) {
		capabilities = qat_sym_crypto_legacy_caps_gen5;
		memcpy(addr, capabilities, legacy_size);
		addr += legacy_capa_num;
	}
	capabilities = qat_sym_crypto_caps_gen5;

	for (i = 0; i < capa_num; i++, iter++) {
		if (slice_map & ICP_ACCEL_MASK_ZUC_256_SLICE && (
			check_auth_capa(&capabilities[iter],
				RTE_CRYPTO_AUTH_ZUC_EIA3) ||
			check_cipher_capa(&capabilities[iter],
				RTE_CRYPTO_CIPHER_ZUC_EEA3))) {
			continue;
		}

		memcpy(addr + curr_capa, capabilities + iter,
			sizeof(struct rte_cryptodev_capabilities));
		curr_capa++;
	}
	internals->qat_dev_capabilities = internals->capa_mz->addr;

	return 0;
}
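
/*
 * Set up the session via the GEN4 path. If GEN4 returns -ENOTSUP for a
 * mixed cipher/auth combination that GEN5 supports, set the extended hash
 * flags accordingly and accept the session.
 */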
static int
qat_sym_crypto_set_session_gen5(void *cdev, void *session)
{
	struct qat_sym_session *ctx = session;
	enum rte_proc_type_t proc_type = rte_eal_process_type();
	int ret;

	if (proc_type == RTE_PROC_AUTO || proc_type == RTE_PROC_INVALID)
		return -EINVAL;

	ret = qat_sym_crypto_set_session_gen4(cdev, session);

	if (ret == -ENOTSUP) {
		/* GEN4 returns -ENOTSUP for some mixed algorithm combinations;
		 * GEN5 handles these, so adjust the session and accept it.
		 */
		if ((ctx->aes_cmac ||
				ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_NULL) &&
				(ctx->qat_cipher_alg ==
					ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 ||
				ctx->qat_cipher_alg ==
					ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 ||
				ctx->qat_cipher_alg == ICP_QAT_HW_CIPHER_ALGO_ZUC_256)) {
			qat_sym_session_set_ext_hash_flags_gen2(ctx, 0);
		} else if ((ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_256_MAC_32 ||
				ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_256_MAC_64 ||
				ctx->qat_hash_alg == ICP_QAT_HW_AUTH_ALGO_ZUC_256_MAC_128) &&
				ctx->qat_cipher_alg != ICP_QAT_HW_CIPHER_ALGO_ZUC_256) {
			qat_sym_session_set_ext_hash_flags_gen2(ctx,
				1 << ICP_QAT_FW_AUTH_HDR_FLAG_ZUC_EIA3_BITPOS);
		}

		ret = 0;
	}

	return ret;
}
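
/* Register the GEN5 symmetric crypto device operations at startup. */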
RTE_INIT(qat_sym_crypto_gen5_init)
{
	qat_sym_gen_dev_ops[QAT_GEN5].cryptodev_ops = &qat_sym_crypto_ops_gen1;
	qat_sym_gen_dev_ops[QAT_GEN5].get_capabilities =
			qat_sym_crypto_cap_get_gen5;
	qat_sym_gen_dev_ops[QAT_GEN5].set_session =
			qat_sym_crypto_set_session_gen5;
	qat_sym_gen_dev_ops[QAT_GEN5].set_raw_dp_ctx =
			qat_sym_configure_raw_dp_ctx_gen4;
	qat_sym_gen_dev_ops[QAT_GEN5].get_feature_flags =
			qat_sym_crypto_feature_flags_get_gen1;
	qat_sym_gen_dev_ops[QAT_GEN5].create_security_ctx =
			qat_sym_create_security_gen1;
}
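
/* Register the GEN5 asymmetric crypto device operations (GEN1 ops reused). */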
RTE_INIT(qat_asym_crypto_gen5_init)
{
	qat_asym_gen_dev_ops[QAT_GEN5].cryptodev_ops =
			&qat_asym_crypto_ops_gen1;
	qat_asym_gen_dev_ops[QAT_GEN5].get_capabilities =
			qat_asym_crypto_cap_get_gen1;
	qat_asym_gen_dev_ops[QAT_GEN5].get_feature_flags =
			qat_asym_crypto_feature_flags_get_gen1;
	qat_asym_gen_dev_ops[QAT_GEN5].set_session =
			qat_asym_crypto_set_session_gen1;
}