1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(C) 2020 Broadcom.
3  * All rights reserved.
4  */
5 
6 #include <stdbool.h>
7 #include <string.h>
8 
9 #include <rte_common.h>
10 #include <rte_cryptodev.h>
11 #include <rte_crypto_sym.h>
12 
13 #include "bcmfs_logs.h"
14 #include "bcmfs_sym_defs.h"
15 #include "bcmfs_dev_msg.h"
16 #include "bcmfs_sym_req.h"
17 #include "bcmfs_sym_engine.h"
18 
19 enum spu2_cipher_type {
20 	SPU2_CIPHER_TYPE_NONE = 0x0,
21 	SPU2_CIPHER_TYPE_AES128 = 0x1,
22 	SPU2_CIPHER_TYPE_AES192 = 0x2,
23 	SPU2_CIPHER_TYPE_AES256 = 0x3,
24 	SPU2_CIPHER_TYPE_DES = 0x4,
25 	SPU2_CIPHER_TYPE_3DES = 0x5,
26 	SPU2_CIPHER_TYPE_LAST
27 };
28 
29 enum spu2_cipher_mode {
30 	SPU2_CIPHER_MODE_ECB = 0x0,
31 	SPU2_CIPHER_MODE_CBC = 0x1,
32 	SPU2_CIPHER_MODE_CTR = 0x2,
33 	SPU2_CIPHER_MODE_CFB = 0x3,
34 	SPU2_CIPHER_MODE_OFB = 0x4,
35 	SPU2_CIPHER_MODE_XTS = 0x5,
36 	SPU2_CIPHER_MODE_CCM = 0x6,
37 	SPU2_CIPHER_MODE_GCM = 0x7,
38 	SPU2_CIPHER_MODE_LAST
39 };
40 
41 enum spu2_hash_type {
42 	SPU2_HASH_TYPE_NONE = 0x0,
43 	SPU2_HASH_TYPE_AES128 = 0x1,
44 	SPU2_HASH_TYPE_AES192 = 0x2,
45 	SPU2_HASH_TYPE_AES256 = 0x3,
46 	SPU2_HASH_TYPE_MD5 = 0x6,
47 	SPU2_HASH_TYPE_SHA1 = 0x7,
48 	SPU2_HASH_TYPE_SHA224 = 0x8,
49 	SPU2_HASH_TYPE_SHA256 = 0x9,
50 	SPU2_HASH_TYPE_SHA384 = 0xa,
51 	SPU2_HASH_TYPE_SHA512 = 0xb,
52 	SPU2_HASH_TYPE_SHA512_224 = 0xc,
53 	SPU2_HASH_TYPE_SHA512_256 = 0xd,
54 	SPU2_HASH_TYPE_SHA3_224 = 0xe,
55 	SPU2_HASH_TYPE_SHA3_256 = 0xf,
56 	SPU2_HASH_TYPE_SHA3_384 = 0x10,
57 	SPU2_HASH_TYPE_SHA3_512 = 0x11,
58 	SPU2_HASH_TYPE_LAST
59 };
60 
61 enum spu2_hash_mode {
62 	SPU2_HASH_MODE_CMAC = 0x0,
63 	SPU2_HASH_MODE_CBC_MAC = 0x1,
64 	SPU2_HASH_MODE_XCBC_MAC = 0x2,
65 	SPU2_HASH_MODE_HMAC = 0x3,
66 	SPU2_HASH_MODE_RABIN = 0x4,
67 	SPU2_HASH_MODE_CCM = 0x5,
68 	SPU2_HASH_MODE_GCM = 0x6,
69 	SPU2_HASH_MODE_RESERVED = 0x7,
70 	SPU2_HASH_MODE_LAST
71 };
72 
73 enum spu2_proto_sel {
74 	SPU2_PROTO_RESV = 0,
75 	SPU2_MACSEC_SECTAG8_ECB = 1,
76 	SPU2_MACSEC_SECTAG8_SCB = 2,
77 	SPU2_MACSEC_SECTAG16 = 3,
78 	SPU2_MACSEC_SECTAG16_8_XPN = 4,
79 	SPU2_IPSEC = 5,
80 	SPU2_IPSEC_ESN = 6,
81 	SPU2_TLS_CIPHER = 7,
82 	SPU2_TLS_AEAD = 8,
83 	SPU2_DTLS_CIPHER = 9,
84 	SPU2_DTLS_AEAD = 10
85 };
86 
87 /* SPU2 response size */
88 #define SPU2_STATUS_LEN			2
89 
90 /* Metadata settings in response */
91 enum spu2_ret_md_opts {
92 	SPU2_RET_NO_MD = 0,		/* return no metadata */
93 	SPU2_RET_FMD_OMD = 1,		/* return both FMD and OMD */
94 	SPU2_RET_FMD_ONLY = 2,		/* return only FMD */
95 	SPU2_RET_FMD_OMD_IV = 3,	/* return FMD and OMD with just IVs */
96 };
97 
98 /* FMD ctrl0 field masks */
99 #define SPU2_CIPH_ENCRYPT_EN            0x1 /* 0: decrypt, 1: encrypt */
100 #define SPU2_CIPH_TYPE_SHIFT              4
101 #define SPU2_CIPH_MODE                0xF00 /* one of spu2_cipher_mode */
102 #define SPU2_CIPH_MODE_SHIFT              8
103 #define SPU2_CFB_MASK                0x7000 /* cipher feedback mask */
104 #define SPU2_CFB_MASK_SHIFT              12
105 #define SPU2_PROTO_SEL             0xF00000 /* MACsec, IPsec, TLS... */
106 #define SPU2_PROTO_SEL_SHIFT             20
107 #define SPU2_HASH_FIRST           0x1000000 /* 1: hash input is input pkt
108 					     * data
109 					     */
110 #define SPU2_CHK_TAG              0x2000000 /* 1: check digest provided */
111 #define SPU2_HASH_TYPE          0x1F0000000 /* one of spu2_hash_type */
112 #define SPU2_HASH_TYPE_SHIFT             28
113 #define SPU2_HASH_MODE         0xF000000000 /* one of spu2_hash_mode */
114 #define SPU2_HASH_MODE_SHIFT             36
115 #define SPU2_CIPH_PAD_EN     0x100000000000 /* 1: Add pad to end of payload for
116 					     *    enc
117 					     */
118 #define SPU2_CIPH_PAD      0xFF000000000000 /* cipher pad value */
119 #define SPU2_CIPH_PAD_SHIFT              48
120 
121 /* FMD ctrl1 field masks */
122 #define SPU2_TAG_LOC                    0x1 /* 1: end of payload, 0: undef */
123 #define SPU2_HAS_FR_DATA                0x2 /* 1: msg has frame data */
124 #define SPU2_HAS_AAD1                   0x4 /* 1: msg has AAD1 field */
125 #define SPU2_HAS_NAAD                   0x8 /* 1: msg has NAAD field */
126 #define SPU2_HAS_AAD2                  0x10 /* 1: msg has AAD2 field */
127 #define SPU2_HAS_ESN                   0x20 /* 1: msg has ESN field */
128 #define SPU2_HASH_KEY_LEN            0xFF00 /* len of hash key in bytes.
129 					     * HMAC only.
130 					     */
131 #define SPU2_HASH_KEY_LEN_SHIFT           8
132 #define SPU2_CIPH_KEY_LEN         0xFF00000 /* len of cipher key in bytes */
133 #define SPU2_CIPH_KEY_LEN_SHIFT          20
134 #define SPU2_GENIV               0x10000000 /* 1: hw generates IV */
135 #define SPU2_HASH_IV             0x20000000 /* 1: IV incl in hash */
136 #define SPU2_RET_IV              0x40000000 /* 1: return IV in output msg
137 					     *    b4 payload
138 					     */
139 #define SPU2_RET_IV_LEN         0xF00000000 /* length in bytes of IV returned.
140 					     * 0 = 16 bytes
141 					     */
142 #define SPU2_RET_IV_LEN_SHIFT            32
143 #define SPU2_IV_OFFSET         0xF000000000 /* gen IV offset */
144 #define SPU2_IV_OFFSET_SHIFT             36
145 #define SPU2_IV_LEN          0x1F0000000000 /* length of input IV in bytes */
146 #define SPU2_IV_LEN_SHIFT                40
147 #define SPU2_HASH_TAG_LEN  0x7F000000000000 /* hash tag length in bytes */
148 #define SPU2_HASH_TAG_LEN_SHIFT          48
149 #define SPU2_RETURN_MD    0x300000000000000 /* return metadata */
150 #define SPU2_RETURN_MD_SHIFT             56
151 #define SPU2_RETURN_FD    0x400000000000000
152 #define SPU2_RETURN_AAD1  0x800000000000000
153 #define SPU2_RETURN_NAAD 0x1000000000000000
154 #define SPU2_RETURN_AAD2 0x2000000000000000
155 #define SPU2_RETURN_PAY  0x4000000000000000 /* return payload */
156 
157 /* FMD ctrl2 field masks */
158 #define SPU2_AAD1_OFFSET              0xFFF /* byte offset of AAD1 field */
159 #define SPU2_AAD1_LEN               0xFF000 /* length of AAD1 in bytes */
160 #define SPU2_AAD1_LEN_SHIFT              12
161 #define SPU2_AAD2_OFFSET         0xFFF00000 /* byte offset of AAD2 field */
162 #define SPU2_AAD2_OFFSET_SHIFT           20
163 #define SPU2_PL_OFFSET   0xFFFFFFFF00000000 /* payload offset from AAD2 */
164 #define SPU2_PL_OFFSET_SHIFT             32
165 
166 /* FMD ctrl3 field masks */
167 #define SPU2_PL_LEN              0xFFFFFFFF /* payload length in bytes */
168 #define SPU2_TLS_LEN         0xFFFF00000000 /* TLS encrypt: cipher len
169 					     * TLS decrypt: compressed len
170 					     */
171 #define SPU2_TLS_LEN_SHIFT               32
172 
173 /*
174  * Max value that can be represented in the Payload Length field of the
175  * ctrl3 word of FMD.
176  */
177 #define SPU2_MAX_PAYLOAD  SPU2_PL_LEN
178 
179 #define SPU2_VAL_NONE	0
180 
181 /* CCM B_0 field definitions, common for SPU-M and SPU2 */
182 #define CCM_B0_ADATA		0x40
183 #define CCM_B0_ADATA_SHIFT	   6
184 #define CCM_B0_M_PRIME		0x38
185 #define CCM_B0_M_PRIME_SHIFT	   3
186 #define CCM_B0_L_PRIME		0x07
187 #define CCM_B0_L_PRIME_SHIFT	   0
188 #define CCM_ESP_L_VALUE		   4
189 
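/*
 * Illustrative sketch (not part of the driver as submitted): every FMD
 * field defined above is programmed by shifting a value into position and
 * clamping it with the matching mask.  The helper name below is
 * hypothetical and shown only to make the mask/shift pairs concrete,
 * e.g. spu2_fmd_field(SPU2_CIPHER_MODE_CBC, SPU2_CIPH_MODE,
 * SPU2_CIPH_MODE_SHIFT).
 */
static inline uint64_t
spu2_fmd_field(uint64_t val, uint64_t mask, unsigned int shift)
{
	return (val << shift) & mask;
}
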
190 static int
191 spu2_cipher_type_xlate(enum rte_crypto_cipher_algorithm cipher_alg,
192 		       enum spu2_cipher_type *spu2_type,
193 		       struct fsattr *key)
194 {
195 	int ret = 0;
196 	int key_size = fsattr_sz(key);
197 
198 	if (cipher_alg == RTE_CRYPTO_CIPHER_AES_XTS)
199 		key_size = key_size / 2;
200 
201 	switch (key_size) {
202 	case BCMFS_CRYPTO_AES128:
203 		*spu2_type = SPU2_CIPHER_TYPE_AES128;
204 		break;
205 	case BCMFS_CRYPTO_AES192:
206 		*spu2_type = SPU2_CIPHER_TYPE_AES192;
207 		break;
208 	case BCMFS_CRYPTO_AES256:
209 		*spu2_type = SPU2_CIPHER_TYPE_AES256;
210 		break;
211 	default:
212 		ret = -EINVAL;
213 	}
214 
215 	return ret;
216 }
217 
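/*
 * Example (added for clarity, assuming the BCMFS_CRYPTO_AES* constants are
 * key sizes in bytes): a 32-byte AES-CBC key selects
 * SPU2_CIPHER_TYPE_AES256, while a 64-byte AES-XTS key is halved first and
 * therefore also selects SPU2_CIPHER_TYPE_AES256.
 */
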
218 static int
219 spu2_hash_xlate(enum rte_crypto_auth_algorithm auth_alg,
220 		struct fsattr *key,
221 		enum spu2_hash_type *spu2_type,
222 		enum spu2_hash_mode *spu2_mode)
223 {
224 	*spu2_mode = 0;
225 
226 	switch (auth_alg) {
227 	case RTE_CRYPTO_AUTH_NULL:
228 		*spu2_type = SPU2_HASH_TYPE_NONE;
229 		break;
230 	case RTE_CRYPTO_AUTH_MD5:
231 		*spu2_type = SPU2_HASH_TYPE_MD5;
232 		break;
233 	case RTE_CRYPTO_AUTH_MD5_HMAC:
234 		*spu2_type = SPU2_HASH_TYPE_MD5;
235 		*spu2_mode = SPU2_HASH_MODE_HMAC;
236 		break;
237 	case RTE_CRYPTO_AUTH_SHA1:
238 		*spu2_type = SPU2_HASH_TYPE_SHA1;
239 		break;
240 	case RTE_CRYPTO_AUTH_SHA1_HMAC:
241 		*spu2_type = SPU2_HASH_TYPE_SHA1;
242 		*spu2_mode = SPU2_HASH_MODE_HMAC;
243 		break;
244 	case RTE_CRYPTO_AUTH_SHA224:
245 		*spu2_type = SPU2_HASH_TYPE_SHA224;
246 		break;
247 	case RTE_CRYPTO_AUTH_SHA224_HMAC:
248 		*spu2_type = SPU2_HASH_TYPE_SHA224;
249 		*spu2_mode = SPU2_HASH_MODE_HMAC;
250 		break;
251 	case RTE_CRYPTO_AUTH_SHA256:
252 		*spu2_type = SPU2_HASH_TYPE_SHA256;
253 		break;
254 	case RTE_CRYPTO_AUTH_SHA256_HMAC:
255 		*spu2_type = SPU2_HASH_TYPE_SHA256;
256 		*spu2_mode = SPU2_HASH_MODE_HMAC;
257 		break;
258 	case RTE_CRYPTO_AUTH_SHA384:
259 		*spu2_type = SPU2_HASH_TYPE_SHA384;
260 		break;
261 	case RTE_CRYPTO_AUTH_SHA384_HMAC:
262 		*spu2_type = SPU2_HASH_TYPE_SHA384;
263 		*spu2_mode = SPU2_HASH_MODE_HMAC;
264 		break;
265 	case RTE_CRYPTO_AUTH_SHA512:
266 		*spu2_type = SPU2_HASH_TYPE_SHA512;
267 		break;
268 	case RTE_CRYPTO_AUTH_SHA512_HMAC:
269 		*spu2_type = SPU2_HASH_TYPE_SHA512;
270 		*spu2_mode = SPU2_HASH_MODE_HMAC;
271 		break;
272 	case RTE_CRYPTO_AUTH_SHA3_224:
273 		*spu2_type = SPU2_HASH_TYPE_SHA3_224;
274 		break;
275 	case RTE_CRYPTO_AUTH_SHA3_224_HMAC:
276 		*spu2_type = SPU2_HASH_TYPE_SHA3_224;
277 		*spu2_mode = SPU2_HASH_MODE_HMAC;
278 		break;
279 	case RTE_CRYPTO_AUTH_SHA3_256:
280 		*spu2_type = SPU2_HASH_TYPE_SHA3_256;
281 		break;
282 	case RTE_CRYPTO_AUTH_SHA3_256_HMAC:
283 		*spu2_type = SPU2_HASH_TYPE_SHA3_256;
284 		*spu2_mode = SPU2_HASH_MODE_HMAC;
285 		break;
286 	case RTE_CRYPTO_AUTH_SHA3_384:
287 		*spu2_type = SPU2_HASH_TYPE_SHA3_384;
288 		break;
289 	case RTE_CRYPTO_AUTH_SHA3_384_HMAC:
290 		*spu2_type = SPU2_HASH_TYPE_SHA3_384;
291 		*spu2_mode = SPU2_HASH_MODE_HMAC;
292 		break;
293 	case RTE_CRYPTO_AUTH_SHA3_512:
294 		*spu2_type = SPU2_HASH_TYPE_SHA3_512;
295 		break;
296 	case RTE_CRYPTO_AUTH_SHA3_512_HMAC:
297 		*spu2_type = SPU2_HASH_TYPE_SHA3_512;
298 		*spu2_mode = SPU2_HASH_MODE_HMAC;
299 		break;
300 	case RTE_CRYPTO_AUTH_AES_XCBC_MAC:
301 		*spu2_mode = SPU2_HASH_MODE_XCBC_MAC;
302 		switch (fsattr_sz(key)) {
303 		case BCMFS_CRYPTO_AES128:
304 			*spu2_type = SPU2_HASH_TYPE_AES128;
305 			break;
306 		case BCMFS_CRYPTO_AES192:
307 			*spu2_type = SPU2_HASH_TYPE_AES192;
308 			break;
309 		case BCMFS_CRYPTO_AES256:
310 			*spu2_type = SPU2_HASH_TYPE_AES256;
311 			break;
312 		default:
313 			return -EINVAL;
314 		}
315 		break;
316 	case RTE_CRYPTO_AUTH_AES_CMAC:
317 		*spu2_mode = SPU2_HASH_MODE_CMAC;
318 		switch (fsattr_sz(key)) {
319 		case BCMFS_CRYPTO_AES128:
320 			*spu2_type = SPU2_HASH_TYPE_AES128;
321 			break;
322 		case BCMFS_CRYPTO_AES192:
323 			*spu2_type = SPU2_HASH_TYPE_AES192;
324 			break;
325 		case BCMFS_CRYPTO_AES256:
326 			*spu2_type = SPU2_HASH_TYPE_AES256;
327 			break;
328 		default:
329 			return -EINVAL;
330 		}
331 		break;
332 	case RTE_CRYPTO_AUTH_AES_GMAC:
333 		*spu2_mode = SPU2_HASH_MODE_GCM;
334 		switch (fsattr_sz(key)) {
335 		case BCMFS_CRYPTO_AES128:
336 			*spu2_type = SPU2_HASH_TYPE_AES128;
337 			break;
338 		case BCMFS_CRYPTO_AES192:
339 			*spu2_type = SPU2_HASH_TYPE_AES192;
340 			break;
341 		case BCMFS_CRYPTO_AES256:
342 			*spu2_type = SPU2_HASH_TYPE_AES256;
343 			break;
344 		default:
345 			return -EINVAL;
346 		}
347 		break;
348 	case RTE_CRYPTO_AUTH_AES_CBC_MAC:
349 		*spu2_mode = SPU2_HASH_MODE_CBC_MAC;
350 		switch (fsattr_sz(key)) {
351 		case BCMFS_CRYPTO_AES128:
352 			*spu2_type = SPU2_HASH_TYPE_AES128;
353 			break;
354 		case BCMFS_CRYPTO_AES192:
355 			*spu2_type = SPU2_HASH_TYPE_AES192;
356 			break;
357 		case BCMFS_CRYPTO_AES256:
358 			*spu2_type = SPU2_HASH_TYPE_AES256;
359 			break;
360 		default:
361 			return -EINVAL;
362 		}
363 		break;
364 	default:
365 		return -EINVAL;
366 	}
367 
368 	return 0;
369 }
370 
371 static int
372 spu2_cipher_xlate(enum rte_crypto_cipher_algorithm cipher_alg,
373 		  struct fsattr *key,
374 		  enum spu2_cipher_type *spu2_type,
375 		  enum spu2_cipher_mode *spu2_mode)
376 {
377 	int ret = 0;
378 
379 	switch (cipher_alg) {
380 	case RTE_CRYPTO_CIPHER_NULL:
381 		*spu2_type = SPU2_CIPHER_TYPE_NONE;
382 		break;
383 	case RTE_CRYPTO_CIPHER_DES_CBC:
384 		*spu2_mode =  SPU2_CIPHER_MODE_CBC;
385 		*spu2_type = SPU2_CIPHER_TYPE_DES;
386 		break;
387 	case RTE_CRYPTO_CIPHER_3DES_ECB:
388 		*spu2_mode =  SPU2_CIPHER_MODE_ECB;
389 		*spu2_type = SPU2_CIPHER_TYPE_3DES;
390 		break;
391 	case RTE_CRYPTO_CIPHER_3DES_CBC:
392 		*spu2_mode =  SPU2_CIPHER_MODE_CBC;
393 		*spu2_type = SPU2_CIPHER_TYPE_3DES;
394 		break;
395 	case RTE_CRYPTO_CIPHER_AES_CBC:
396 		*spu2_mode =  SPU2_CIPHER_MODE_CBC;
397 		ret = spu2_cipher_type_xlate(cipher_alg, spu2_type, key);
398 		break;
399 	case RTE_CRYPTO_CIPHER_AES_ECB:
400 		*spu2_mode =  SPU2_CIPHER_MODE_ECB;
401 		ret = spu2_cipher_type_xlate(cipher_alg, spu2_type, key);
402 		break;
403 	case RTE_CRYPTO_CIPHER_AES_CTR:
404 		*spu2_mode =  SPU2_CIPHER_MODE_CTR;
405 		ret = spu2_cipher_type_xlate(cipher_alg, spu2_type, key);
406 		break;
407 	case RTE_CRYPTO_CIPHER_AES_XTS:
408 		*spu2_mode =  SPU2_CIPHER_MODE_XTS;
409 		ret = spu2_cipher_type_xlate(cipher_alg, spu2_type, key);
410 		break;
411 	default:
412 		return -EINVAL;
413 	}
414 
415 	return ret;
416 }
417 
418 static void
419 spu2_fmd_ctrl0_write(struct spu2_fmd *fmd,
420 		     bool is_inbound, bool auth_first,
421 		     enum spu2_proto_sel protocol,
422 		     enum spu2_cipher_type cipher_type,
423 		     enum spu2_cipher_mode cipher_mode,
424 		     enum spu2_hash_type auth_type,
425 		     enum spu2_hash_mode auth_mode)
426 {
427 	uint64_t ctrl0 = 0;
428 
429 	if (cipher_type != SPU2_CIPHER_TYPE_NONE && !is_inbound)
430 		ctrl0 |= SPU2_CIPH_ENCRYPT_EN;
431 
432 	ctrl0 |= ((uint64_t)cipher_type << SPU2_CIPH_TYPE_SHIFT) |
433 		  ((uint64_t)cipher_mode << SPU2_CIPH_MODE_SHIFT);
434 
435 	if (protocol != SPU2_PROTO_RESV)
436 		ctrl0 |= (uint64_t)protocol << SPU2_PROTO_SEL_SHIFT;
437 
438 	if (auth_first)
439 		ctrl0 |= SPU2_HASH_FIRST;
440 
441 	if (is_inbound && auth_type != SPU2_HASH_TYPE_NONE)
442 		ctrl0 |= SPU2_CHK_TAG;
443 
444 	ctrl0 |= (((uint64_t)auth_type << SPU2_HASH_TYPE_SHIFT) |
445 		  ((uint64_t)auth_mode << SPU2_HASH_MODE_SHIFT));
446 
447 	fmd->ctrl0 = ctrl0;
448 
449 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
450 	BCMFS_DP_HEXDUMP_LOG(DEBUG, "ctrl0:", &fmd->ctrl0, sizeof(uint64_t));
451 #endif
452 }
453 
454 static void
455 spu2_fmd_ctrl1_write(struct spu2_fmd *fmd, bool is_inbound,
456 		     uint64_t assoc_size, uint64_t auth_key_len,
457 		     uint64_t cipher_key_len, bool gen_iv, bool hash_iv,
458 		     bool return_iv, uint64_t ret_iv_len,
459 		     uint64_t ret_iv_offset, uint64_t cipher_iv_len,
460 		     uint64_t digest_size, bool return_payload, bool return_md)
461 {
462 	uint64_t ctrl1 = 0;
463 
464 	if (is_inbound && digest_size != 0)
465 		ctrl1 |= SPU2_TAG_LOC;
466 
467 	if (assoc_size != 0)
468 		ctrl1 |= SPU2_HAS_AAD2;
469 
470 	if (auth_key_len != 0)
471 		ctrl1 |= ((auth_key_len << SPU2_HASH_KEY_LEN_SHIFT) &
472 			  SPU2_HASH_KEY_LEN);
473 
474 	if (cipher_key_len != 0)
475 		ctrl1 |= ((cipher_key_len << SPU2_CIPH_KEY_LEN_SHIFT) &
476 			  SPU2_CIPH_KEY_LEN);
477 
478 	if (gen_iv)
479 		ctrl1 |= SPU2_GENIV;
480 
481 	if (hash_iv)
482 		ctrl1 |= SPU2_HASH_IV;
483 
484 	if (return_iv) {
485 		ctrl1 |= SPU2_RET_IV;
486 		ctrl1 |= ret_iv_len << SPU2_RET_IV_LEN_SHIFT;
487 		ctrl1 |= ret_iv_offset << SPU2_IV_OFFSET_SHIFT;
488 	}
489 
490 	ctrl1 |= ((cipher_iv_len << SPU2_IV_LEN_SHIFT) & SPU2_IV_LEN);
491 
492 	if (digest_size != 0) {
493 		ctrl1 |= ((digest_size << SPU2_HASH_TAG_LEN_SHIFT) &
494 			  SPU2_HASH_TAG_LEN);
495 	}
496 
497 	/*
498 	 * Ask for the output packet to include the FMD, but we do not need
499 	 * the keys and IVs back in the OMD.
500 	 */
501 	if (return_md)
502 		ctrl1 |= ((uint64_t)SPU2_RET_FMD_ONLY << SPU2_RETURN_MD_SHIFT);
503 	else
504 		ctrl1 |= ((uint64_t)SPU2_RET_NO_MD << SPU2_RETURN_MD_SHIFT);
505 
506 	/* Crypto API does not get assoc data back. So no need for AAD2. */
507 
508 	if (return_payload)
509 		ctrl1 |= SPU2_RETURN_PAY;
510 
511 	fmd->ctrl1 = ctrl1;
512 
513 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
514 	BCMFS_DP_HEXDUMP_LOG(DEBUG, "ctrl1:", &fmd->ctrl1, sizeof(uint64_t));
515 #endif
516 }
517 
518 static void
519 spu2_fmd_ctrl2_write(struct spu2_fmd *fmd, uint64_t cipher_offset,
520 		     uint64_t auth_key_len __rte_unused,
521 		     uint64_t auth_iv_len  __rte_unused,
522 		     uint64_t cipher_key_len  __rte_unused,
523 		     uint64_t cipher_iv_len  __rte_unused)
524 {
525 	uint64_t aad1_offset;
526 	uint64_t aad2_offset;
527 	uint16_t aad1_len = 0;
528 	uint64_t payload_offset;
529 
530 	/* AAD1 offset is from start of FD. FD length always 0. */
531 	aad1_offset = 0;
532 
533 	aad2_offset = aad1_offset;
534 	payload_offset = cipher_offset;
535 	fmd->ctrl2 = aad1_offset |
536 		     (aad1_len << SPU2_AAD1_LEN_SHIFT) |
537 		     (aad2_offset << SPU2_AAD2_OFFSET_SHIFT) |
538 		     (payload_offset << SPU2_PL_OFFSET_SHIFT);
539 
540 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
541 	BCMFS_DP_HEXDUMP_LOG(DEBUG, "ctrl2:", &fmd->ctrl2, sizeof(uint64_t));
542 #endif
543 }
544 
545 static void
546 spu2_fmd_ctrl3_write(struct spu2_fmd *fmd, uint64_t payload_len)
547 {
548 	fmd->ctrl3 = payload_len & SPU2_PL_LEN;
549 
550 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
551 	BCMFS_DP_HEXDUMP_LOG(DEBUG, "ctrl3:", &fmd->ctrl3, sizeof(uint64_t));
552 #endif
553 }
554 
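/*
 * Usage sketch (illustrative only, not part of the driver): the request
 * builders below always program the four FMD words in order.  For a
 * cipher-only AES128-CBC request the sequence reduces to roughly the
 * following; the function name and the hard-coded algorithm choice are
 * hypothetical.
 */
static inline void
spu2_fmd_example_fill(struct spu2_fmd *fmd, bool is_inbound,
		      uint64_t key_len, uint64_t iv_len,
		      uint64_t payload_len)
{
	spu2_fmd_ctrl0_write(fmd, is_inbound, SPU2_VAL_NONE, SPU2_PROTO_RESV,
			     SPU2_CIPHER_TYPE_AES128, SPU2_CIPHER_MODE_CBC,
			     SPU2_VAL_NONE, SPU2_VAL_NONE);
	spu2_fmd_ctrl1_write(fmd, SPU2_VAL_NONE, SPU2_VAL_NONE, SPU2_VAL_NONE,
			     key_len, false, false, SPU2_VAL_NONE,
			     SPU2_VAL_NONE, SPU2_VAL_NONE, iv_len,
			     SPU2_VAL_NONE, SPU2_VAL_NONE, SPU2_VAL_NONE);
	/* nothing is carried in ctrl2 for a plain cipher request */
	memset(&fmd->ctrl2, 0, sizeof(uint64_t));
	spu2_fmd_ctrl3_write(fmd, payload_len);
}
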
555 int
556 bcmfs_crypto_build_auth_req(struct bcmfs_sym_request *sreq,
557 			    enum rte_crypto_auth_algorithm a_alg,
558 			    enum rte_crypto_auth_operation auth_op,
559 			    struct fsattr *src, struct fsattr *dst,
560 			    struct fsattr *mac, struct fsattr *auth_key,
561 			    struct fsattr *iv)
562 {
563 	int ret;
564 	uint64_t dst_size;
565 	int src_index = 0;
566 	struct spu2_fmd *fmd;
567 	uint64_t payload_len;
568 	uint32_t src_msg_len = 0;
569 	enum spu2_hash_mode spu2_auth_mode;
570 	enum spu2_hash_type spu2_auth_type = SPU2_HASH_TYPE_NONE;
571 	uint64_t iv_size = (iv != NULL) ? fsattr_sz(iv) : 0;
572 	uint64_t auth_ksize = (auth_key != NULL) ? fsattr_sz(auth_key) : 0;
573 	bool is_inbound = (auth_op == RTE_CRYPTO_AUTH_OP_VERIFY);
574 
575 	if (src == NULL)
576 		return -EINVAL;
577 
578 	payload_len = fsattr_sz(src);
579 	if (!payload_len) {
580 		BCMFS_DP_LOG(ERR, "null payload not supported");
581 		return -EINVAL;
582 	}
583 
584 	/* at least one of dst and mac must be non-NULL */
585 	if (dst == NULL && mac == NULL)
586 		return -EINVAL;
587 
588 	if (auth_op == RTE_CRYPTO_AUTH_OP_GENERATE && dst != NULL)
589 		dst_size = fsattr_sz(dst);
590 	else if (auth_op == RTE_CRYPTO_AUTH_OP_VERIFY && mac != NULL)
591 		dst_size = fsattr_sz(mac);
592 	else
593 		return -EINVAL;
594 
595 	/* spu2 hash algorithm and hash algorithm mode */
596 	ret = spu2_hash_xlate(a_alg, auth_key, &spu2_auth_type,
597 			      &spu2_auth_mode);
598 	if (ret)
599 		return -EINVAL;
600 
601 	fmd  = &sreq->fmd;
602 
603 	spu2_fmd_ctrl0_write(fmd, is_inbound, SPU2_VAL_NONE,
604 			     SPU2_PROTO_RESV, SPU2_VAL_NONE,
605 			     SPU2_VAL_NONE, spu2_auth_type, spu2_auth_mode);
606 
607 	spu2_fmd_ctrl1_write(fmd, is_inbound, SPU2_VAL_NONE,
608 			     auth_ksize, SPU2_VAL_NONE, false,
609 			     false, SPU2_VAL_NONE, SPU2_VAL_NONE,
610 			     SPU2_VAL_NONE, iv_size,
611 			     dst_size, SPU2_VAL_NONE, SPU2_VAL_NONE);
612 
613 	memset(&fmd->ctrl2, 0, sizeof(uint64_t));
614 
615 	spu2_fmd_ctrl3_write(fmd, fsattr_sz(src));
616 
617 	/* FMD */
618 	sreq->msgs.srcs_addr[src_index] = sreq->fptr;
619 	src_msg_len += sizeof(*fmd);
620 
621 	/* Start of OMD */
622 	if (auth_ksize != 0) {
623 		memcpy((uint8_t *)fmd + src_msg_len, fsattr_va(auth_key),
624 		       auth_ksize);
625 		src_msg_len += auth_ksize;
626 	}
627 
628 	if (iv_size != 0) {
629 		memcpy((uint8_t *)fmd + src_msg_len, fsattr_va(iv),
630 		       iv_size);
631 		src_msg_len += iv_size;
632 	} /* End of OMD */
633 
634 	sreq->msgs.srcs_len[src_index] = src_msg_len;
635 	src_index++;
636 
637 	sreq->msgs.srcs_addr[src_index] = fsattr_pa(src);
638 	sreq->msgs.srcs_len[src_index] = fsattr_sz(src);
639 	src_index++;
640 
641 	/*
642 	 * For an authentication verify operation, feed the received MAC to
643 	 * the SPU2 engine as an additional source fragment.
644 	 */
645 	if (auth_op == RTE_CRYPTO_AUTH_OP_VERIFY && mac != NULL) {
646 		sreq->msgs.srcs_addr[src_index] = fsattr_pa(mac);
647 		sreq->msgs.srcs_len[src_index] = fsattr_sz(mac);
648 		src_index++;
649 	}
650 	sreq->msgs.srcs_count = src_index;
651 
652 	/*
653 	 * The output packet contains the actual output from SPU2 and
654 	 * the status packet, so dsts_count below is always 2.
655 	 */
656 	if (auth_op == RTE_CRYPTO_AUTH_OP_GENERATE) {
657 		sreq->msgs.dsts_addr[0] = fsattr_pa(dst);
658 		sreq->msgs.dsts_len[0] = fsattr_sz(dst);
659 	} else {
660 		/*
661 		 * For an authentication verify operation, provide a dummy
662 		 * location for the generated hash, because SPU2 produces
663 		 * the hash even when only verifying.
664 		 */
665 		sreq->msgs.dsts_addr[0] = sreq->dptr;
666 		sreq->msgs.dsts_len[0] = fsattr_sz(mac);
667 	}
668 
669 	sreq->msgs.dsts_addr[1] = sreq->rptr;
670 	sreq->msgs.dsts_len[1] = SPU2_STATUS_LEN;
671 	sreq->msgs.dsts_count = 2;
672 
673 	return 0;
674 }
675 
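/*
 * Summary (added for clarity): the auth-only path above therefore builds
 * a source list of
 *   [0] FMD plus OMD (auth key, optional IV)     at sreq->fptr
 *   [1] payload                                  at fsattr_pa(src)
 *   [2] received MAC, for verify operations only
 * and a destination list of
 *   [0] generated digest (real buffer on generate, dummy sreq->dptr on
 *       verify)
 *   [1] two-byte SPU2 status word                at sreq->rptr
 */
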
676 int
677 bcmfs_crypto_build_cipher_req(struct bcmfs_sym_request *sreq,
678 			      enum rte_crypto_cipher_algorithm calgo,
679 			      enum rte_crypto_cipher_operation cipher_op,
680 			      struct fsattr *src, struct fsattr *dst,
681 			      struct fsattr *cipher_key, struct fsattr *iv)
682 {
683 	int ret = 0;
684 	int src_index = 0;
685 	struct spu2_fmd *fmd;
686 	uint32_t src_msg_len = 0;
687 	enum spu2_cipher_mode spu2_ciph_mode = 0;
688 	enum spu2_cipher_type spu2_ciph_type = SPU2_CIPHER_TYPE_NONE;
689 	bool is_inbound = (cipher_op == RTE_CRYPTO_CIPHER_OP_DECRYPT);
690 
691 	if (src == NULL || dst == NULL || iv == NULL)
692 		return -EINVAL;
693 
694 	fmd  = &sreq->fmd;
695 
696 	/* spu2 cipher algorithm and cipher algorithm mode */
697 	ret = spu2_cipher_xlate(calgo, cipher_key,
698 				&spu2_ciph_type, &spu2_ciph_mode);
699 	if (ret)
700 		return -EINVAL;
701 
702 	spu2_fmd_ctrl0_write(fmd, is_inbound, SPU2_VAL_NONE,
703 			     SPU2_PROTO_RESV, spu2_ciph_type, spu2_ciph_mode,
704 			     SPU2_VAL_NONE, SPU2_VAL_NONE);
705 
706 	spu2_fmd_ctrl1_write(fmd, SPU2_VAL_NONE, SPU2_VAL_NONE, SPU2_VAL_NONE,
707 			     fsattr_sz(cipher_key), false, false,
708 			     SPU2_VAL_NONE, SPU2_VAL_NONE, SPU2_VAL_NONE,
709 			     fsattr_sz(iv), SPU2_VAL_NONE, SPU2_VAL_NONE,
710 			     SPU2_VAL_NONE);
711 
712 	/* Nothing for FMD2 */
713 	memset(&fmd->ctrl2, 0, sizeof(uint64_t));
714 
715 	spu2_fmd_ctrl3_write(fmd, fsattr_sz(src));
716 
717 	/* FMD */
718 	sreq->msgs.srcs_addr[src_index] = sreq->fptr;
719 	src_msg_len += sizeof(*fmd);
720 
721 	/* Start of OMD */
722 	if (cipher_key != NULL && fsattr_sz(cipher_key) != 0) {
723 		uint8_t *cipher_buf = (uint8_t *)fmd + src_msg_len;
724 		if (calgo == RTE_CRYPTO_CIPHER_AES_XTS) {
725 			uint32_t xts_keylen = fsattr_sz(cipher_key) / 2;
726 			memcpy(cipher_buf,
727 			       (uint8_t *)fsattr_va(cipher_key) + xts_keylen,
728 			       xts_keylen);
729 			memcpy(cipher_buf + xts_keylen,
730 			       fsattr_va(cipher_key), xts_keylen);
731 		} else {
732 			memcpy(cipher_buf, fsattr_va(cipher_key),
733 			       fsattr_sz(cipher_key));
734 		}
735 
736 		src_msg_len += fsattr_sz(cipher_key);
737 	}
738 
739 	if (iv != NULL && fsattr_sz(iv) != 0) {
740 		memcpy((uint8_t *)fmd + src_msg_len,
741 		       fsattr_va(iv), fsattr_sz(iv));
742 		src_msg_len +=  fsattr_sz(iv);
743 	} /* End of OMD */
744 
745 	sreq->msgs.srcs_len[src_index] = src_msg_len;
746 	src_index++;
747 
748 	sreq->msgs.srcs_addr[src_index] = fsattr_pa(src);
749 	sreq->msgs.srcs_len[src_index] = fsattr_sz(src);
750 	src_index++;
751 	sreq->msgs.srcs_count = src_index;
752 
753 	/*
754 	 * The output packet contains the actual output from SPU2 and
755 	 * the status packet, so dsts_count below is always 2.
756 	 */
757 	sreq->msgs.dsts_addr[0] = fsattr_pa(dst);
758 	sreq->msgs.dsts_len[0] = fsattr_sz(dst);
759 
760 	sreq->msgs.dsts_addr[1] = sreq->rptr;
761 	sreq->msgs.dsts_len[1] = SPU2_STATUS_LEN;
762 	sreq->msgs.dsts_count = 2;
763 
764 	return 0;
765 }
766 
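/*
 * Note (added for clarity): for AES-XTS the session key arrives as
 * key1 || key2, but the OMD assembled above stores the second half before
 * the first (key2 || key1); the source and destination layout otherwise
 * mirrors the auth-only path, with the OMD carrying the cipher key and IV
 * instead of the auth key.
 */
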
767 int
768 bcmfs_crypto_build_chain_request(struct bcmfs_sym_request *sreq,
769 				 enum rte_crypto_cipher_algorithm cipher_alg,
770 				 enum rte_crypto_cipher_operation cipher_op __rte_unused,
771 				 enum rte_crypto_auth_algorithm auth_alg,
772 				 enum rte_crypto_auth_operation auth_op,
773 				 struct fsattr *src, struct fsattr *dst,
774 				 struct fsattr *cipher_key,
775 				 struct fsattr *auth_key,
776 				 struct fsattr *iv, struct fsattr *aad,
777 				 struct fsattr *digest, bool cipher_first)
778 {
779 	int ret = 0;
780 	int src_index = 0;
781 	int dst_index = 0;
782 	bool auth_first = 0;
783 	struct spu2_fmd *fmd;
784 	uint64_t payload_len;
785 	uint32_t src_msg_len = 0;
786 	enum spu2_cipher_mode spu2_ciph_mode = 0;
787 	enum spu2_hash_mode spu2_auth_mode = 0;
788 	enum spu2_cipher_type spu2_ciph_type = SPU2_CIPHER_TYPE_NONE;
789 	uint64_t auth_ksize = (auth_key != NULL) ?
790 				fsattr_sz(auth_key) : 0;
791 	uint64_t cipher_ksize = (cipher_key != NULL) ?
792 					fsattr_sz(cipher_key) : 0;
793 	uint64_t iv_size = (iv != NULL) ? fsattr_sz(iv) : 0;
794 	uint64_t digest_size = (digest != NULL) ?
795 					fsattr_sz(digest) : 0;
796 	uint64_t aad_size = (aad != NULL) ?
797 				fsattr_sz(aad) : 0;
798 	enum spu2_hash_type spu2_auth_type = SPU2_HASH_TYPE_NONE;
799 	bool is_inbound = (auth_op == RTE_CRYPTO_AUTH_OP_VERIFY);
800 
801 	if (src == NULL)
802 		return -EINVAL;
803 
804 	payload_len = fsattr_sz(src);
805 	if (!payload_len) {
806 		BCMFS_DP_LOG(ERR, "null payload not supported");
807 		return -EINVAL;
808 	}
809 
810 	/* spu2 hash algorithm and hash algorithm mode */
811 	ret = spu2_hash_xlate(auth_alg, auth_key, &spu2_auth_type,
812 			      &spu2_auth_mode);
813 	if (ret)
814 		return -EINVAL;
815 
816 	/* spu2 cipher algorithm and cipher algorithm mode */
817 	ret = spu2_cipher_xlate(cipher_alg, cipher_key, &spu2_ciph_type,
818 				&spu2_ciph_mode);
819 	if (ret) {
820 		BCMFS_DP_LOG(ERR, "cipher xlate error");
821 		return -EINVAL;
822 	}
823 
824 	auth_first = cipher_first ? 0 : 1;
825 
826 	fmd  = &sreq->fmd;
827 
828 	spu2_fmd_ctrl0_write(fmd, is_inbound, auth_first, SPU2_PROTO_RESV,
829 			     spu2_ciph_type, spu2_ciph_mode,
830 			     spu2_auth_type, spu2_auth_mode);
831 
832 	spu2_fmd_ctrl1_write(fmd, is_inbound, aad_size, auth_ksize,
833 			     cipher_ksize, false, false, SPU2_VAL_NONE,
834 			     SPU2_VAL_NONE, SPU2_VAL_NONE, iv_size,
835 			     digest_size, false, SPU2_VAL_NONE);
836 
837 	spu2_fmd_ctrl2_write(fmd, aad_size, auth_ksize, 0,
838 			     cipher_ksize, iv_size);
839 
840 	spu2_fmd_ctrl3_write(fmd, payload_len);
841 
842 	/* FMD */
843 	sreq->msgs.srcs_addr[src_index] = sreq->fptr;
844 	src_msg_len += sizeof(*fmd);
845 
846 	/* Start of OMD */
847 	if (auth_ksize != 0) {
848 		memcpy((uint8_t *)fmd + src_msg_len,
849 		       fsattr_va(auth_key), auth_ksize);
850 		src_msg_len += auth_ksize;
851 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
852 	BCMFS_DP_HEXDUMP_LOG(DEBUG, "auth key:", fsattr_va(auth_key),
853 			     auth_ksize);
854 #endif
855 	}
856 
857 	if (cipher_ksize != 0) {
858 		memcpy((uint8_t *)fmd + src_msg_len,
859 		       fsattr_va(cipher_key), cipher_ksize);
860 		src_msg_len += cipher_ksize;
861 
862 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
863 	BCMFS_DP_HEXDUMP_LOG(DEBUG, "cipher key:", fsattr_va(cipher_key),
864 			     cipher_ksize);
865 #endif
866 	}
867 
868 	if (iv_size != 0) {
869 		memcpy((uint8_t *)fmd + src_msg_len,
870 		       fsattr_va(iv), iv_size);
871 		src_msg_len += iv_size;
872 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
873 		BCMFS_DP_HEXDUMP_LOG(DEBUG, "iv:", fsattr_va(iv),
874 				     iv_size);
875 #endif
876 	} /* End of OMD */
877 
878 	sreq->msgs.srcs_len[src_index] = src_msg_len;
879 
880 	if (aad_size != 0) {
881 		if (fsattr_sz(aad) < BCMFS_AAD_THRESH_LEN) {
882 			memcpy((uint8_t *)fmd + src_msg_len, fsattr_va(aad), aad_size);
883 			sreq->msgs.srcs_len[src_index] += aad_size;
884 		} else {
885 			src_index++;
886 			sreq->msgs.srcs_addr[src_index] = fsattr_pa(aad);
887 			sreq->msgs.srcs_len[src_index] = aad_size;
888 		}
889 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
890 		BCMFS_DP_HEXDUMP_LOG(DEBUG, "aad :", fsattr_va(aad),
891 				     aad_size);
892 #endif
893 	}
894 
895 	src_index++;
896 
897 	sreq->msgs.srcs_addr[src_index] = fsattr_pa(src);
898 	sreq->msgs.srcs_len[src_index] = fsattr_sz(src);
899 	src_index++;
900 
901 	if (auth_op == RTE_CRYPTO_AUTH_OP_VERIFY && digest != NULL &&
902 	    fsattr_sz(digest) != 0) {
903 		sreq->msgs.srcs_addr[src_index] = fsattr_pa(digest);
904 		sreq->msgs.srcs_len[src_index] = fsattr_sz(digest);
905 		src_index++;
906 	}
907 	sreq->msgs.srcs_count = src_index;
908 
909 	if (dst != NULL) {
910 		sreq->msgs.dsts_addr[dst_index] = fsattr_pa(dst);
911 		sreq->msgs.dsts_len[dst_index] = fsattr_sz(dst);
912 		dst_index++;
913 	}
914 
915 	if (auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
916 		/*
917 		 * For an authentication verify operation the SPU2 engine
918 		 * still generates the digest, but the application does not
919 		 * need it as such, so program a dummy location to capture
920 		 * the digest data.
921 		 */
922 		if (digest_size != 0) {
923 			sreq->msgs.dsts_addr[dst_index] =
924 				sreq->dptr;
925 			sreq->msgs.dsts_len[dst_index] =
926 				fsattr_sz(digest);
927 			dst_index++;
928 		}
929 	} else {
930 		if (digest_size != 0) {
931 			sreq->msgs.dsts_addr[dst_index] =
932 				fsattr_pa(digest);
933 			sreq->msgs.dsts_len[dst_index] =
934 				fsattr_sz(digest);
935 			dst_index++;
936 		}
937 	}
938 
939 	sreq->msgs.dsts_addr[dst_index] = sreq->rptr;
940 	sreq->msgs.dsts_len[dst_index] = SPU2_STATUS_LEN;
941 	dst_index++;
942 	sreq->msgs.dsts_count = dst_index;
943 
944 	return 0;
945 }
946 
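/*
 * Summary (added for clarity): the chained cipher+auth path above builds
 * a source list of
 *   [0] FMD plus OMD (auth key, cipher key, IV), with the AAD appended
 *       when it is shorter than BCMFS_AAD_THRESH_LEN
 *   [.] AAD as its own fragment when it reaches BCMFS_AAD_THRESH_LEN
 *   [.] payload
 *   [.] received digest, for verify operations only
 * and a destination list of the output buffer, the digest (real buffer on
 * generate, dummy sreq->dptr on verify) and the two-byte status word.
 */
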
947 static void
948 bcmfs_crypto_ccm_update_iv(uint8_t *ivbuf,
949 			   uint64_t *ivlen, bool is_esp)
950 {
951 	int L;  /* size of length field, in bytes */
952 
953 	/*
954 	 * In RFC 4309 (ESP) mode, L is fixed at 4 bytes; otherwise the
955 	 * first byte of the IV carries (L-1) in its bottom 3 bits,
956 	 * per RFC 3610.
957 	 */
958 	if (is_esp)
959 		L = CCM_ESP_L_VALUE;
960 	else
961 		L = ((ivbuf[0] & CCM_B0_L_PRIME) >>
962 		      CCM_B0_L_PRIME_SHIFT) + 1;
963 
964 	/* SPU2 doesn't want these length bytes nor the first byte... */
965 	*ivlen -= (1 + L);
966 	memmove(ivbuf, &ivbuf[1], *ivlen);
967 }
968 
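/*
 * Worked example (added for clarity): for a 16-byte B_0-style IV whose
 * flags byte encodes L' = 1 (so L = 2), the leading flags byte and the two
 * trailing length bytes are dropped and the 13-byte nonce is moved to the
 * front of the buffer: *ivlen becomes 16 - (1 + 2) = 13.
 */
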
969 int
970 bcmfs_crypto_build_aead_request(struct bcmfs_sym_request *sreq,
971 				enum rte_crypto_aead_algorithm ae_algo,
972 				enum rte_crypto_aead_operation aeop,
973 				struct fsattr *src, struct fsattr *dst,
974 				struct fsattr *key, struct fsattr *iv,
975 				struct fsattr *aad, struct fsattr *digest)
976 {
977 	int src_index = 0;
978 	int dst_index = 0;
979 	bool auth_first = 0;
980 	struct spu2_fmd *fmd;
981 	uint64_t payload_len;
982 	uint32_t src_msg_len = 0;
983 	uint8_t iv_buf[BCMFS_MAX_IV_SIZE];
984 	enum spu2_cipher_mode spu2_ciph_mode = 0;
985 	enum spu2_hash_mode spu2_auth_mode = 0;
986 	enum spu2_cipher_type spu2_ciph_type = SPU2_CIPHER_TYPE_NONE;
987 	enum spu2_hash_type spu2_auth_type = SPU2_HASH_TYPE_NONE;
988 	uint64_t ksize = (key != NULL) ? fsattr_sz(key) : 0;
989 	uint64_t iv_size = (iv != NULL) ? fsattr_sz(iv) : 0;
990 	uint64_t aad_size = (aad != NULL) ? fsattr_sz(aad) : 0;
991 	uint64_t digest_size = (digest != NULL) ?
992 				fsattr_sz(digest) : 0;
993 	bool is_inbound = (aeop == RTE_CRYPTO_AEAD_OP_DECRYPT);
994 
995 	if (src == NULL)
996 		return -EINVAL;
997 
998 	payload_len = fsattr_sz(src);
999 	if (!payload_len) {
1000 		BCMFS_DP_LOG(ERR, "null payload not supported");
1001 		return -EINVAL;
1002 	}
1003 
1004 	switch (ksize) {
1005 	case BCMFS_CRYPTO_AES128:
1006 		spu2_auth_type = SPU2_HASH_TYPE_AES128;
1007 		spu2_ciph_type = SPU2_CIPHER_TYPE_AES128;
1008 		break;
1009 	case BCMFS_CRYPTO_AES192:
1010 		spu2_auth_type = SPU2_HASH_TYPE_AES192;
1011 		spu2_ciph_type = SPU2_CIPHER_TYPE_AES192;
1012 		break;
1013 	case BCMFS_CRYPTO_AES256:
1014 		spu2_auth_type = SPU2_HASH_TYPE_AES256;
1015 		spu2_ciph_type = SPU2_CIPHER_TYPE_AES256;
1016 		break;
1017 	default:
1018 		return -EINVAL;
1019 	}
1020 
1021 	if (ae_algo == RTE_CRYPTO_AEAD_AES_GCM) {
1022 		spu2_auth_mode = SPU2_HASH_MODE_GCM;
1023 		spu2_ciph_mode = SPU2_CIPHER_MODE_GCM;
1024 		/*
1025 		 * SPU2 needs 12 bytes of IV in total, i.e. an 8-byte IV
1026 		 * (random number) plus 4 bytes of salt.
1027 		 */
1028 		if (fsattr_sz(iv) > 12)
1029 			iv_size = 12;
1030 
1031 		/*
1032 		 * On SPU2, AES-GCM runs the cipher first on encrypt and
1033 		 * the auth first on decrypt.
1034 		 */
1035 
1036 		auth_first = (aeop == RTE_CRYPTO_AEAD_OP_ENCRYPT) ?
1037 				0 : 1;
1038 	}
1039 
1040 	if (iv_size != 0)
1041 		memcpy(iv_buf, fsattr_va(iv), iv_size);
1042 
1043 	if (ae_algo == RTE_CRYPTO_AEAD_AES_CCM) {
1044 		spu2_auth_mode = SPU2_HASH_MODE_CCM;
1045 		spu2_ciph_mode = SPU2_CIPHER_MODE_CCM;
1046 		if (iv_size != 0)  {
1047 			memcpy(iv_buf, fsattr_va(iv),
1048 			       iv_size);
1049 			bcmfs_crypto_ccm_update_iv(iv_buf, &iv_size, false);
1050 		}
1051 
1052 		/* opposite for ccm (auth 1st on encrypt) */
1053 		auth_first = (aeop == RTE_CRYPTO_AEAD_OP_ENCRYPT) ?
1054 			      0 : 1;
1055 	}
1056 
1057 	fmd  = &sreq->fmd;
1058 
1059 	spu2_fmd_ctrl0_write(fmd, is_inbound, auth_first, SPU2_PROTO_RESV,
1060 			     spu2_ciph_type, spu2_ciph_mode,
1061 			     spu2_auth_type, spu2_auth_mode);
1062 
1063 	spu2_fmd_ctrl1_write(fmd, is_inbound, aad_size, 0,
1064 			     ksize, false, false, SPU2_VAL_NONE,
1065 			     SPU2_VAL_NONE, SPU2_VAL_NONE, iv_size,
1066 			     digest_size, false, SPU2_VAL_NONE);
1067 
1068 	spu2_fmd_ctrl2_write(fmd, aad_size, 0, 0,
1069 			     ksize, iv_size);
1070 
1071 	spu2_fmd_ctrl3_write(fmd, payload_len);
1072 
1073 	/* FMD */
1074 	sreq->msgs.srcs_addr[src_index] = sreq->fptr;
1075 	src_msg_len += sizeof(*fmd);
1076 
1077 	if (ksize) {
1078 		memcpy((uint8_t *)fmd + src_msg_len,
1079 		       fsattr_va(key), ksize);
1080 		src_msg_len += ksize;
1081 
1082 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
1083 	BCMFS_DP_HEXDUMP_LOG(DEBUG, "cipher key:", fsattr_va(key),
1084 			     ksize);
1085 #endif
1086 	}
1087 
1088 	if (iv_size) {
1089 		memcpy((uint8_t *)fmd + src_msg_len, iv_buf, iv_size);
1090 		src_msg_len += iv_size;
1091 
1092 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
1093 		BCMFS_DP_HEXDUMP_LOG(DEBUG, "iv:", iv_buf,
1094 				     iv_size);
1095 #endif
1096 	} /* End of OMD */
1097 
1098 	sreq->msgs.srcs_len[src_index] = src_msg_len;
1099 
1100 	if (aad_size != 0) {
1101 		if (aad_size < BCMFS_AAD_THRESH_LEN) {
1102 			memcpy((uint8_t *)fmd + src_msg_len, fsattr_va(aad), aad_size);
1103 			sreq->msgs.srcs_len[src_index] += aad_size;
1104 		} else {
1105 			src_index++;
1106 			sreq->msgs.srcs_addr[src_index] = fsattr_pa(aad);
1107 			sreq->msgs.srcs_len[src_index] = aad_size;
1108 		}
1109 #if RTE_LOG_DP_LEVEL >= RTE_LOG_DEBUG
1110 		BCMFS_DP_HEXDUMP_LOG(DEBUG, "aad :", fsattr_va(aad),
1111 				     aad_size);
1112 #endif
1113 	}
1114 
1115 	src_index++;
1116 
1117 	sreq->msgs.srcs_addr[src_index] = fsattr_pa(src);
1118 	sreq->msgs.srcs_len[src_index] = fsattr_sz(src);
1119 	src_index++;
1120 
1121 	if (aeop == RTE_CRYPTO_AEAD_OP_DECRYPT && digest != NULL &&
1122 	    fsattr_sz(digest) != 0) {
1123 		sreq->msgs.srcs_addr[src_index] = fsattr_pa(digest);
1124 		sreq->msgs.srcs_len[src_index] = fsattr_sz(digest);
1125 		src_index++;
1126 	}
1127 	sreq->msgs.srcs_count = src_index;
1128 
1129 	if (dst != NULL) {
1130 		sreq->msgs.dsts_addr[dst_index] = fsattr_pa(dst);
1131 		sreq->msgs.dsts_len[dst_index] = fsattr_sz(dst);
1132 		dst_index++;
1133 	}
1134 
1135 	if (aeop == RTE_CRYPTO_AEAD_OP_DECRYPT) {
1136 		/*
1137 		 * For a decrypt operation the SPU2 engine still generates
1138 		 * the digest, but the application does not need it as
1139 		 * such, so program a dummy location to capture the
1140 		 * digest data.
1141 		 */
1142 		if (digest_size != 0) {
1143 			sreq->msgs.dsts_addr[dst_index] =
1144 				sreq->dptr;
1145 			sreq->msgs.dsts_len[dst_index] =
1146 				digest_size;
1147 			dst_index++;
1148 		}
1149 	} else {
1150 		if (digest_size != 0) {
1151 			sreq->msgs.dsts_addr[dst_index] =
1152 				fsattr_pa(digest);
1153 			sreq->msgs.dsts_len[dst_index] =
1154 				digest_size;
1155 			dst_index++;
1156 		}
1157 	}
1158 
1159 	sreq->msgs.dsts_addr[dst_index] = sreq->rptr;
1160 	sreq->msgs.dsts_len[dst_index] = SPU2_STATUS_LEN;
1161 	dst_index++;
1162 	sreq->msgs.dsts_count = dst_index;
1163 
1164 	return 0;
1165 }
1166