/*
 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <assert.h>
#include <openssl/aes.h>
#include "crypto/evp.h"
#include "modes_local.h"
#include <openssl/rand.h>
#include "evp_local.h"

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

#ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif

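/*
 * Largest byte count handled per low-level call by the bit-wise CFB1
 * loop: one sixteenth of the size_t range, so that the corresponding bit
 * count (len * 8) cannot overflow a size_t.
 */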
#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))

#ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
#endif
#ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif
#ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
#endif
#ifdef AES_XTS_ASM
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
#endif

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}

#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# endif
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#endif

#if defined(OPENSSL_CPUID_OBJ) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )

extern unsigned int OPENSSL_ia32cap_P[];

# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
# ifdef BSAES_ASM
#  define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
/*
 * AES-NI section
 */
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
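/*
 * Word 1 of OPENSSL_ia32cap_P mirrors CPUID.1:ECX, so bit 41-32 above is
 * ECX bit 9 (SSSE3, required by the vector-permutation and bit-sliced
 * implementations) and bit 57-32 is ECX bit 25 (AES-NI).
 */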

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#  define AES_GCM_ASM(gctx)  (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
                              gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2           /* minor size optimization */
# endif

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);

static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
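/*
 * For illustration: BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc,
 * CBC, flags) defines EVP_aes_128_cbc(), which selects the AES-NI cipher
 * table at run time when AESNI_CAPABLE is set and the generic AES table
 * otherwise.
 */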

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
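/*
 * Note the XTS special case above: the EVP key length is doubled because
 * the supplied key is really two AES keys (see aesni_xts_init_key()).
 */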

#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks

# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter grows the code to roughly the size of
 * multiple key-length specific subroutines anyway, so why fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec, int /*unused*/);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                   &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                               EVP_CIPHER_CTX_key_length(ctx) * 4,
                               &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
/*
 * IBM S390X support
 */
# include "s390x_arch.h"

typedef struct {
    union {
        double align;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;
} S390X_AES_ECB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;

    int res;
} S390X_AES_OFB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;

    int res;
} S390X_AES_CFB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;
    int key_set;

    unsigned char *iv;
    int ivlen;
    int iv_set;
    int iv_gen;

    int taglen;

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
} S390X_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;
            int m;
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;

/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
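/* E.g. keylen 24: S390X_AES_128 + ((192 - 128) >> 6) == S390X_AES_128 + 1. */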

/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_256))

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE 0    /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 0
# define S390X_aes_256_cbc_CAPABLE 0
# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE

static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT);

    if (key != NULL)
        memcpy(cctx->km.param.k, key, keylen);

    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}

# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);

    if (key != NULL)
        memcpy(cctx->kmo.param.k, key, keylen);

    cctx->res = 0;
    memcpy(cctx->kmo.param.cv, oiv, ivlen);
    return 1;
}

static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;
    int rem;

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    memcpy(iv, cctx->kmo.param.cv, ivlen);
    cctx->res = n;
    return 1;
}

# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT)
               | (16 << 24);    /* 16 bytes cipher feedback */

    if (key != NULL)
        memcpy(cctx->kmf.param.k, key, keylen);

    cctx->res = 0;
    memcpy(cctx->kmf.param.cv, oiv, ivlen);
    return 1;
}

static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;
    int rem;
    unsigned char tmp;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    memcpy(iv, cctx->kmf.param.cv, ivlen);
    cctx->res = n;
    return 1;
}

# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256))

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT)
               | (1 << 24);     /* 1 byte cipher feedback flag */

    if (key != NULL)
        memcpy(cctx->kmf.param.k, key, keylen);

    cctx->res = 0;
    memcpy(cctx->kmf.param.cv, oiv, ivlen);
    return 1;
}

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    memcpy(iv, cctx->kmf.param.cv, ivlen);
    return 1;
}

# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE 0    /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 0
# define S390X_aes_256_ctr_CAPABLE 0
# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
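/*-
 * E.g. a 13-byte iv occupies 32 bytes: the iv itself, zero padding up to
 * the last 8 bytes, and the 64-bit bit-length of the iv at the end (see
 * EVP_CTRL_AEAD_SET_IVLEN below).
 */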

/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}

/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}
/*-
 * Reset the GCM state and compute the initial counter block j0 from the
 * iv: iv||1 for 96-bit ivs, a GHASH of the zero-padded iv otherwise.
 * Code is big-endian.
 */
s390x_aes_gcm_setiv(S390X_AES_GCM_CTX * ctx)1582 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx)
1583 {
1584 ctx->kma.param.t.g[0] = 0;
1585 ctx->kma.param.t.g[1] = 0;
1586 ctx->kma.param.tpcl = 0;
1587 ctx->kma.param.taadl = 0;
1588 ctx->mreslen = 0;
1589 ctx->areslen = 0;
1590 ctx->kreslen = 0;
1591
1592 if (ctx->ivlen == 12) {
1593 memcpy(&ctx->kma.param.j0, ctx->iv, ctx->ivlen);
1594 ctx->kma.param.j0.w[3] = 1;
1595 ctx->kma.param.cv.w = 1;
1596 } else {
1597 /* ctx->iv has the right size and is already padded. */
1598 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1599 ctx->fc, &ctx->kma.param);
1600 ctx->fc |= S390X_KMA_HS;
1601
1602 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1603 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1604 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1605 ctx->kma.param.t.g[0] = 0;
1606 ctx->kma.param.t.g[1] = 0;
1607 }
1608 }
1609
1610 /*-
1611 * Performs various operations on the context structure depending on control
1612 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1613 * Code is big-endian.
1614 */
s390x_aes_gcm_ctrl(EVP_CIPHER_CTX * c,int type,int arg,void * ptr)1615 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1616 {
1617 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1618 S390X_AES_GCM_CTX *gctx_out;
1619 EVP_CIPHER_CTX *out;
1620 unsigned char *buf, *iv;
1621 int ivlen, enc, len;
1622
1623 switch (type) {
1624 case EVP_CTRL_INIT:
1625 ivlen = EVP_CIPHER_iv_length(c->cipher);
1626 iv = EVP_CIPHER_CTX_iv_noconst(c);
1627 gctx->key_set = 0;
1628 gctx->iv_set = 0;
1629 gctx->ivlen = ivlen;
1630 gctx->iv = iv;
1631 gctx->taglen = -1;
1632 gctx->iv_gen = 0;
1633 gctx->tls_aad_len = -1;
1634 return 1;
1635
1636 case EVP_CTRL_GET_IVLEN:
1637 *(int *)ptr = gctx->ivlen;
1638 return 1;
1639
1640 case EVP_CTRL_AEAD_SET_IVLEN:
1641 if (arg <= 0)
1642 return 0;
1643
1644 if (arg != 12) {
1645 iv = EVP_CIPHER_CTX_iv_noconst(c);
1646 len = S390X_gcm_ivpadlen(arg);
1647
1648 /* Allocate memory for iv if needed. */
1649 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1650 if (gctx->iv != iv)
1651 OPENSSL_free(gctx->iv);
1652
1653 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1654 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1655 return 0;
1656 }
1657 }
1658 /* Add padding. */
1659 memset(gctx->iv + arg, 0, len - arg - 8);
1660 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1661 }
1662 gctx->ivlen = arg;
1663 return 1;
1664
1665 case EVP_CTRL_AEAD_SET_TAG:
1666 buf = EVP_CIPHER_CTX_buf_noconst(c);
1667 enc = EVP_CIPHER_CTX_encrypting(c);
1668 if (arg <= 0 || arg > 16 || enc)
1669 return 0;
1670
1671 memcpy(buf, ptr, arg);
1672 gctx->taglen = arg;
1673 return 1;
1674
1675 case EVP_CTRL_AEAD_GET_TAG:
1676 enc = EVP_CIPHER_CTX_encrypting(c);
1677 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1678 return 0;
1679
1680 memcpy(ptr, gctx->kma.param.t.b, arg);
1681 return 1;
1682
1683 case EVP_CTRL_GCM_SET_IV_FIXED:
1684 /* Special case: -1 length restores whole iv */
1685 if (arg == -1) {
1686 memcpy(gctx->iv, ptr, gctx->ivlen);
1687 gctx->iv_gen = 1;
1688 return 1;
1689 }
1690 /*
1691 * Fixed field must be at least 4 bytes and invocation field at least
1692 * 8.
1693 */
1694 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1695 return 0;
1696
1697 if (arg)
1698 memcpy(gctx->iv, ptr, arg);
1699
1700 enc = EVP_CIPHER_CTX_encrypting(c);
1701 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1702 return 0;
1703
1704 gctx->iv_gen = 1;
1705 return 1;
1706
1707 case EVP_CTRL_GCM_IV_GEN:
1708 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1709 return 0;
1710
1711 s390x_aes_gcm_setiv(gctx);
1712
1713 if (arg <= 0 || arg > gctx->ivlen)
1714 arg = gctx->ivlen;
1715
1716 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1717 /*
1718 * Invocation field will be at least 8 bytes in size and so no need
1719 * to check wrap around or increment more than last 8 bytes.
1720 */
1721 ctr64_inc(gctx->iv + gctx->ivlen - 8);
1722 gctx->iv_set = 1;
1723 return 1;
1724
1725 case EVP_CTRL_GCM_SET_IV_INV:
1726 enc = EVP_CIPHER_CTX_encrypting(c);
1727 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1728 return 0;
1729
1730 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1731 s390x_aes_gcm_setiv(gctx);
1732 gctx->iv_set = 1;
1733 return 1;
1734
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}

/*-
 * Set key or iv or enc/dec. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    gctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT);

    if (key != NULL) {
        gctx->fc &= ~S390X_KMA_HS;
        memcpy(&gctx->kma.param.k, key, keylen);
        gctx->key_set = 1;
    }

    if (iv != NULL) {
        memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_gen = 0;
        gctx->iv_set = 1;
    }

    if (gctx->key_set && gctx->iv_set)
        s390x_aes_gcm_setiv(gctx);

    gctx->fc &= ~(S390X_KMA_LPC | S390X_KMA_LAAD);
    gctx->areslen = 0;
    gctx->mreslen = 0;
    gctx->kreslen = 0;
    return 1;
}

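/*-
 * On the wire, the body of a TLS GCM record is laid out as
 * explicit nonce (8) || ciphertext || tag (16); the length arithmetic
 * below accounts for exactly these fields.
 */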
/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /*-
         * Recall that we already en-/decrypted gctx->mres
         * and returned it to the caller.
         */
        OPENSSL_cleanse(tmp, gctx->mreslen);

        enc = EVP_CIPHER_CTX_encrypting(ctx);
        if (enc) {
            gctx->taglen = 16;
        } else {
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}

static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    if (gctx == NULL)
        return 0;

    iv = EVP_CIPHER_CTX_iv(c);
    if (iv != gctx->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}

# define S390X_AES_XTS_CTX              EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE      0 /* checked by callee */
# define S390X_aes_256_xts_CAPABLE      0

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE &&            \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE &&            \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE &&            \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_256)))

# define S390X_CCM_AAD_FLAG     0x40

/*-
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}

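/*-
 * RFC 3610 prefixes the AAD with an encoding of its length: two bytes for
 * lengths below 2^16 - 2^8, 0xfffe followed by a 32-bit length up to 2^32,
 * and 0xffff followed by a 64-bit length beyond that. The first MAC'd
 * block below is built exactly this way.
 */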
/*-
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    if (!alen)
        return;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    rem = alen & 0xf;
    alen &= ~(size_t)0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}

/*-
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
 * success.
 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
{
    size_t n, rem;
    unsigned int i, l, num;
    unsigned char flags;

    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
    l = flags & 0x7;
    ctx->aes.ccm.nonce.b[0] = l;

    /*-
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    n = 0;
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
        n <<= 8;
    }
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

    if (n != len)
        return -1;              /* length mismatch */

    if (enc) {
        /* Two operations per block plus one for tag encryption */
        ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
        if (ctx->aes.ccm.blocks > (1ULL << 61))
            return -2;          /* too much data */
    }

    num = 0;
    rem = len & 0xf;
    len &= ~(size_t)0xf;

    if (enc) {
        /* mac-then-encrypt */
        if (len)
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }

        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    } else {
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        if (len)
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }
    }
    /* encrypt tag */
    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
    return 0;
}

/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const unsigned char *ivec = EVP_CIPHER_CTX_iv(ctx);
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    unsigned char iv[EVP_MAX_IV_LENGTH];

    if (out != in
            || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        return -1;

    if (enc) {
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    }

    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    /*-
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(iv, ivec, sizeof(iv));
    memcpy(iv + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, iv, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    } else {
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
                               cctx->aes.ccm.m))
                return len;
        }

        OPENSSL_cleanse(out, len);
        return -1;
    }
}

/*-
 * Set key or iv or enc/dec. Returns 1 if successful.
 * Otherwise 0 is returned.
 */
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);

    cctx->aes.ccm.fc = S390X_AES_FC(keylen);

    if (key != NULL) {
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
        cctx->aes.ccm.key_set = 1;
    }
    if (iv != NULL) {
        memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
        cctx->aes.ccm.iv_set = 1;
    }

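    /*-
     * Per RFC 3610 the flags octet of block B0 packs L' = l - 1 into its
     * low three bits and M' = (m - 2) / 2 into bits 3-5; the AAD flag
     * (0x40) is OR-ed in later. E.g. the defaults l = 8, m = 12 encode
     * as 0x07 | (0x05 << 3) = 0x2f.
     */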
    /* Store encoded m and l. */
    cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                             | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
    memset(cctx->aes.ccm.nonce.b + 1, 0, sizeof(cctx->aes.ccm.nonce.b) - 1);

    cctx->aes.ccm.blocks = 0;
    cctx->aes.ccm.len_set = 0;
    return 1;
}

/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * plaintext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    const unsigned char *ivec = EVP_CIPHER_CTX_iv(ctx);
    unsigned char *buf;
    int rv;

    if (!cctx->aes.ccm.key_set)
        return -1;

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*-
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            s390x_aes_ccm_setiv(cctx, ivec, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    /* Update(): Process message. */

    if (!cctx->aes.ccm.len_set) {
        /*-
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        s390x_aes_ccm_setiv(cctx, ivec, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;

        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        if (rv == -1)
            OPENSSL_cleanse(out, len);

        return rv;
    }
}

/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->aes.ccm.l;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        memcpy(iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && ptr)
            return 0;

        if (ptr) {
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if (arg < cctx->aes.ccm.m)
            return 0;

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return 1;

    case EVP_CTRL_COPY:
        return 1;

    default:
        return -1;
    }
}

# define s390x_aes_ccm_cleanup aes_ccm_cleanup

# ifndef OPENSSL_NO_OCB
#  define S390X_AES_OCB_CTX             EVP_AES_OCB_CTX
#  define S390X_aes_128_ocb_CAPABLE     0
#  define S390X_aes_192_ocb_CAPABLE     0
#  define S390X_aes_256_ocb_CAPABLE     0

#  define s390x_aes_ocb_init_key aes_ocb_init_key
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                  const unsigned char *iv, int enc);
#  define s390x_aes_ocb_cipher aes_ocb_cipher
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
#  define s390x_aes_ocb_cleanup aes_ocb_cleanup
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
#  define s390x_aes_ocb_ctrl aes_ocb_ctrl
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# endif

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,    \
                              MODE,flags)                               \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##nmode,blocksize,                                 \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_##MODE##_MODE,         \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    NULL,                                                               \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##nmode,                                           \
    blocksize,                                                          \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    aes_init_key,                                                       \
    aes_##mode##_cipher,                                                \
    NULL,                                                               \
    sizeof(EVP_AES_KEY),                                                \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##mode,                                            \
    blocksize,                                                          \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    ivlen,                                                              \
    flags | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_##MODE##_MODE,         \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    s390x_aes_##mode##_cleanup,                                         \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    s390x_aes_##mode##_ctrl,                                            \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##mode,blocksize,                                  \
    (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    aes_##mode##_init_key,                                              \
    aes_##mode##_cipher,                                                \
    aes_##mode##_cleanup,                                               \
    sizeof(EVP_AES_##MODE##_CTX),                                       \
    NULL,                                                               \
    NULL,                                                               \
    aes_##mode##_ctrl,                                                  \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}
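
/*-
 * The EVP_aes_* getters defined by the macros above select between the
 * CPACF-backed s390x implementation and the generic software one at
 * runtime, based on the *_CAPABLE macros.
 */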

#else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#endif

#if defined(OPENSSL_CPUID_OBJ) && defined(AES_ASM) && \
        (defined(__arm__) || defined(__arm) || defined(__aarch64__))
# include "arm_arch.h"
# if __ARM_MAX_ARCH__ >= 7
#  if defined(BSAES_ASM)
#   define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
#  if defined(VPAES_ASM)
#   define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#  endif
# endif
# if __ARM_MAX_ARCH__ >= 8
#  define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#  define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#  define HWAES_encrypt aes_v8_encrypt
#  define HWAES_decrypt aes_v8_decrypt
#  define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
# endif
#endif

#if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif

#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)

static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx),
                            EVP_CIPHER_CTX_encrypting(ctx));
    else if (EVP_CIPHER_CTX_encrypting(ctx))
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);

    return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);
    size_t i;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (len < bl)
        return 1;

    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                          EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    int num = EVP_CIPHER_CTX_num(ctx);
    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                            EVP_CIPHER_CTX_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        return 1;
    }

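    /*-
     * CRYPTO_cfb128_1_encrypt() takes its length in bits, so the byte
     * count is multiplied by 8 below; chunking at MAXBITCHUNK keeps that
     * multiplication from overflowing a size_t.
     */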
    while (len >= MAXBITCHUNK) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
        len -= MAXBITCHUNK;
        out += MAXBITCHUNK;
        in += MAXBITCHUNK;
    }
    if (len) {
        int num = EVP_CIPHER_CTX_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                EVP_CIPHER_CTX_iv_noconst(ctx), &num,
                                EVP_CIPHER_CTX_encrypting(ctx), dat->block);
        EVP_CIPHER_CTX_set_num(ctx, num);
    }

    return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    EVP_CIPHER_CTX_iv_noconst(ctx),
                                    EVP_CIPHER_CTX_buf_noconst(ctx),
                                    &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              EVP_CIPHER_CTX_iv_noconst(ctx),
                              EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                              dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
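
/*-
 * The packs above define the standard EVP_aes_{128,192,256}_* getters,
 * which applications consume through the generic EVP interface. A minimal
 * usage sketch (not part of this file, error handling omitted):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, out, &outl, in, inl);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);
 *     EVP_CIPHER_CTX_free(c);
 */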

static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    if (gctx == NULL)
        return 0;
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
        OPENSSL_free(gctx->iv);
    return 1;
}

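/*-
 * Note: gctx->iv normally aliases the context's built-in IV buffer; the
 * EVP_CTRL_AEAD_SET_IVLEN handler below heap-allocates a replacement when
 * a length above EVP_MAX_IV_LENGTH is requested, which is why cleanup and
 * copy compare pointers before freeing or duplicating.
 */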
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is already set, use the IV; otherwise save it. */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */

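/*-
 * Where AES_GCM_ASM is available, AES_gcm_encrypt()/AES_gcm_decrypt() below
 * are the platform's stitched AES-GCM assembly routines; they consume the
 * bulk of the payload and the generic GCM code only handles the remainder.
 */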
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}

#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
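
/*-
 * EVP_CIPH_FLAG_CUSTOM_CIPHER means the do_cipher callback performs its own
 * buffering: the EVP layer passes data straight through and calls it one
 * final time with in == NULL so that the tag can be computed or verified.
 */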

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)

static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);

    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);

        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}

static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key)
        do {
            /* The key is two half length keys in reality */
            const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;

            /*
             * Verify that the two keys are different.
             *
             * This addresses the vulnerability described in Rogaway's
             * September 2004 paper:
             *
             *      "Efficient Instantiations of Tweakable Blockciphers and
             *       Refinements to Modes OCB and PMAC".
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
             *
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
             * that:
             *      "The check for Key_1 != Key_2 shall be done at any place
             *       BEFORE using the keys in the XTS-AES algorithm to process
             *       data with them."
             */
            if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
                EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
                return 0;
            }

#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
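            /*
             * Hence the EVP_CIPHER_CTX_key_length(ctx) * 4 below: total
             * bytes times 8 bits, halved for the single sub-key.
             */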
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
# endif
                }

                HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key,
                                          EVP_CIPHER_CTX_key_length(ctx) * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                      EVP_CIPHER_CTX_key_length(ctx) * 4,
                                      &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                    &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                                EVP_CIPHER_CTX_key_length(ctx) * 4,
                                &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!xctx->xts.key1 || !xctx->xts.key2)
        return 0;
    if (!out || !in || len < AES_BLOCK_SIZE)
        return 0;
    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2,
                         EVP_CIPHER_CTX_iv_noconst(ctx));
    else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
                                   in, out, len,
                                   EVP_CIPHER_CTX_encrypting(ctx)))
        return 0;
    return 1;
}

#define aes_xts_cleanup NULL

#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
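
/*-
 * There is deliberately no EVP_aes_192_xts(): NIST SP 800-38E defines
 * XTS-AES only for 128- and 256-bit AES keys.
 */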

static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;
    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->L;
        return 1;
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

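    /*-
     * In CCM the 16-byte counter block is split into one flags octet, the
     * nonce and an L-byte length field, so the nonce length is always
     * 15 - L; setting the IV length therefore just selects the matching L.
     */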
3553 case EVP_CTRL_AEAD_SET_IVLEN:
3554 arg = 15 - arg;
3555 /* fall thru */
3556 case EVP_CTRL_CCM_SET_L:
3557 if (arg < 2 || arg > 8)
3558 return 0;
3559 cctx->L = arg;
3560 return 1;
3561
3562 case EVP_CTRL_AEAD_SET_TAG:
3563 if ((arg & 1) || arg < 4 || arg > 16)
3564 return 0;
3565 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3566 return 0;
3567 if (ptr) {
3568 cctx->tag_set = 1;
3569 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3570 }
3571 cctx->M = arg;
3572 return 1;
3573
3574 case EVP_CTRL_AEAD_GET_TAG:
3575 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3576 return 0;
3577 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3578 return 0;
3579 cctx->tag_set = 0;
3580 cctx->iv_set = 0;
3581 cctx->len_set = 0;
3582 return 1;
3583
3584 case EVP_CTRL_COPY:
3585 {
3586 EVP_CIPHER_CTX *out = ptr;
3587 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3588 if (cctx->ccm.key) {
3589 if (cctx->ccm.key != &cctx->ks)
3590 return 0;
3591 cctx_out->ccm.key = &cctx_out->ks;
3592 }
3593 return 1;
3594 }
3595
3596 default:
3597 return -1;
3598
3599 }
3600 }
3601
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key)
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

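/*
 * For reference (added annotation, not in the original source): the TLS CCM
 * record processed in place above is laid out as
 *
 *     explicit nonce (EVP_CCM_TLS_EXPLICIT_IV_LEN = 8 bytes)
 *   | ciphertext    (len bytes after the length correction)
 *   | tag           (cctx->M bytes)
 *
 * with the 4-byte fixed nonce part supplied earlier via
 * EVP_CTRL_CCM_SET_IV_FIXED.  Per RFC 6655, TLS uses a 12-byte nonce
 * (4 fixed + 8 explicit), i.e. L = 3.
 */
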
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

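/*
 * Illustrative usage sketch (not part of the original source): because CCM
 * encodes the total message length into the initial block, callers that
 * supply AAD must pass the plaintext length first (NULL output and NULL
 * input), then the AAD, then the payload, matching the !out branches above.
 * Error checking is omitted for brevity:
 *
 *     EVP_EncryptUpdate(ccm_evp, NULL, &outl, NULL, (int)pt_len);
 *     EVP_EncryptUpdate(ccm_evp, NULL, &outl, aad, (int)aad_len);
 *     EVP_EncryptUpdate(ccm_evp, ct, &outl, pt, (int)pt_len);
 *     EVP_EncryptFinal_ex(ccm_evp, ct + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(ccm_evp, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */
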
#define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    /* Non-NULL indicates an IV has been set; points into the ctx IV buffer */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        if (!iv)
            wctx->iv = NULL;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv,
               EVP_CIPHER_CTX_iv_length(ctx));
        wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    }
    return 1;
}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (is_partially_overlapping(out, in, inlen)) {
        EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    return rv ? (int)rv : -1;
}

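/*
 * Illustrative usage sketch (not part of the original source): wrap-mode
 * ciphers refuse to run unless the caller explicitly opts in with
 * EVP_CIPHER_CTX_FLAG_WRAP_ALLOW, so a minimal RFC 3394 key wrap using the
 * default IV looks like (error checking omitted, "kek" is a hypothetical
 * key-encryption key):
 *
 *     EVP_CIPHER_CTX *wrap_evp = EVP_CIPHER_CTX_new();
 *     EVP_CIPHER_CTX_set_flags(wrap_evp, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(wrap_evp, EVP_aes_128_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(wrap_evp, wrapped, &outl, key_to_wrap, 16);
 *
 * The wrapped output is 8 bytes longer than the input, as computed above.
 */
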
#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}

#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (!ptr) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}

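/*
 * Illustrative usage sketch (not part of the original source): OCB follows
 * the usual AEAD ctrl pattern.  The tag length defaults to 16; to use a
 * shorter tag, first call EVP_CTRL_AEAD_SET_TAG with a NULL ptr, since a
 * decrypt-side tag must match the configured length exactly:
 *
 *     EVP_DecryptInit_ex(ocb_evp, EVP_aes_128_ocb(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(ocb_evp, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_CIPHER_CTX_ctrl(ocb_evp, EVP_CTRL_AEAD_SET_TAG, 16, tag);
 *     EVP_DecryptInit_ex(ocb_evp, NULL, NULL, key, nonce);
 */
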
# ifdef HWAES_CAPABLE
#  ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_encrypt ((ocb128_f)NULL)
#  endif
#  ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_decrypt ((ocb128_f)NULL)
#  endif
# endif

static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        } while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}

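/*
 * Illustrative usage sketch (not part of the original source): because of
 * the block buffering above, EVP_EncryptUpdate() may emit fewer bytes than
 * it consumed and flush the remainder on a later call or at Final, so
 * callers should accumulate outl.  Error checking is omitted for brevity:
 *
 *     EVP_EncryptUpdate(ocb_evp, NULL, &outl, aad, (int)aad_len);
 *     EVP_EncryptUpdate(ocb_evp, ct, &outl, pt, (int)pt_len);
 *     total = outl;
 *     EVP_EncryptFinal_ex(ocb_evp, ct + total, &outl);
 *     total += outl;
 *     EVP_CIPHER_CTX_ctrl(ocb_evp, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */
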
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */
