/*	$NetBSD: aes_armv8.c,v 1.5 2020/07/25 22:33:04 riastradh Exp $	*/

/*-
 * Copyright (c) 2020 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
#include <sys/cdefs.h>
__KERNEL_RCSID(1, "$NetBSD: aes_armv8.c,v 1.5 2020/07/25 22:33:04 riastradh Exp $");

#ifdef _KERNEL
#include <sys/types.h>
#include <sys/proc.h>
#include <sys/systm.h>
#else
#include <assert.h>
#include <err.h>
#include <stdint.h>
#include <string.h>
#define	KASSERT			assert
#define	panic(fmt, args...)	err(1, fmt, ##args)
#endif

#include <crypto/aes/aes.h>
#include <crypto/aes/aes_impl.h>
#include <crypto/aes/arch/arm/aes_armv8.h>

#include <aarch64/armreg.h>

#ifdef _KERNEL
#include <arm/fpu.h>
#else
#include <sys/sysctl.h>
#include <stddef.h>
#define	fpu_kern_enter()	((void)0)
#define	fpu_kern_leave()	((void)0)
#endif

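/*
 * Key-schedule setup: nrounds selects the key size (10, 12, and 14
 * rounds correspond to AES-128, AES-192, and AES-256) and dispatches
 * to the matching assembly key-expansion routine.
 */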
static void
aesarmv8_setenckey(struct aesenc *enc, const uint8_t key[static 16],
    uint32_t nrounds)
{

	switch (nrounds) {
	case 10:
		aesarmv8_setenckey128(enc, key);
		break;
	case 12:
		aesarmv8_setenckey192(enc, key);
		break;
	case 14:
		aesarmv8_setenckey256(enc, key);
		break;
	default:
		panic("invalid AES rounds: %u", nrounds);
	}
}

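/*
 * The *_impl wrappers below bracket the assembly routines with
 * fpu_kern_enter()/fpu_kern_leave(), since kernel code must not touch
 * the SIMD registers outside such a section; in the userland test
 * build these are defined above as no-ops.
 */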
static void
aesarmv8_setenckey_impl(struct aesenc *enc, const uint8_t key[static 16],
    uint32_t nrounds)
{

	fpu_kern_enter();
	aesarmv8_setenckey(enc, key, nrounds);
	fpu_kern_leave();
}

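/*
 * The decryption schedule is derived from a temporary encryption
 * schedule via aesarmv8_enctodec (the equivalent inverse cipher
 * ordering); the temporary copy is zeroed afterwards so no key
 * material lingers on the stack.
 */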
static void
aesarmv8_setdeckey_impl(struct aesdec *dec, const uint8_t key[static 16],
    uint32_t nrounds)
{
	struct aesenc enc;

	fpu_kern_enter();
	aesarmv8_setenckey(&enc, key, nrounds);
	aesarmv8_enctodec(&enc, dec, nrounds);
	fpu_kern_leave();

	explicit_memset(&enc, 0, sizeof enc);
}

static void
aesarmv8_enc_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], uint32_t nrounds)
{

	fpu_kern_enter();
	aesarmv8_enc(enc, in, out, nrounds);
	fpu_kern_leave();
}

static void
aesarmv8_dec_impl(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], uint32_t nrounds)
{

	fpu_kern_enter();
	aesarmv8_dec(dec, in, out, nrounds);
	fpu_kern_leave();
}

static void
aesarmv8_cbc_enc_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t iv[static 16],
    uint32_t nrounds)
{

	KASSERT(nbytes % 16 == 0);

	fpu_kern_enter();
	aesarmv8_cbc_enc(enc, in, out, nbytes, iv, nrounds);
	fpu_kern_leave();
}

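/*
 * CBC decryption parallelizes across blocks (each plaintext block
 * depends only on two ciphertext blocks), so any leading piece that is
 * not a multiple of 128 bytes is handled one block at a time and the
 * rest is handed to the eight-block routine, with iv carrying the
 * chaining state between the two calls.  CBC encryption above has no
 * multi-block variant because each block depends on the previous
 * ciphertext block.
 */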
static void
aesarmv8_cbc_dec_impl(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t iv[static 16],
    uint32_t nrounds)
{

	KASSERT(nbytes % 16 == 0);

	fpu_kern_enter();

	if (nbytes % 128) {
		aesarmv8_cbc_dec1(dec, in, out, nbytes % 128, iv, nrounds);
		in += nbytes % 128;
		out += nbytes % 128;
		nbytes -= nbytes % 128;
	}

	KASSERT(nbytes % 128 == 0);
	if (nbytes)
		aesarmv8_cbc_dec8(dec, in, out, nbytes, iv, nrounds);

	fpu_kern_leave();
}

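/*
 * The XTS paths use the same split as CBC decryption: a one-block
 * routine for any head that is not a multiple of 128 bytes, then an
 * eight-block routine for the rest, with the tweak updated and carried
 * between the calls.
 */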
static void
aesarmv8_xts_enc_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t tweak[static 16],
    uint32_t nrounds)
{

	KASSERT(nbytes % 16 == 0);

	fpu_kern_enter();

	if (nbytes % 128) {
		aesarmv8_xts_enc1(enc, in, out, nbytes % 128, tweak, nrounds);
		in += nbytes % 128;
		out += nbytes % 128;
		nbytes -= nbytes % 128;
	}

	KASSERT(nbytes % 128 == 0);
	if (nbytes)
		aesarmv8_xts_enc8(enc, in, out, nbytes, tweak, nrounds);

	fpu_kern_leave();
}

static void
aesarmv8_xts_dec_impl(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t tweak[static 16],
    uint32_t nrounds)
{

	KASSERT(nbytes % 16 == 0);

	fpu_kern_enter();

	if (nbytes % 128) {
		aesarmv8_xts_dec1(dec, in, out, nbytes % 128, tweak, nrounds);
		in += nbytes % 128;
		out += nbytes % 128;
		nbytes -= nbytes % 128;
	}

	KASSERT(nbytes % 128 == 0);
	if (nbytes)
		aesarmv8_xts_dec8(dec, in, out, nbytes, tweak, nrounds);

	fpu_kern_leave();
}

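/*
 * CCM/CBC-MAC wrappers.  aesarmv8_cbcmac_update1 folds the input into
 * a running 16-byte CBC-MAC state; the CCM routines update a 32-byte
 * authctr buffer holding the running authenticator together with the
 * counter block (layout as assumed by the assembly and the MI CCM
 * code).
 */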
static void
aesarmv8_cbcmac_update1_impl(const struct aesenc *enc,
    const uint8_t in[static 16], size_t nbytes, uint8_t auth[static 16],
    uint32_t nrounds)
{

	KASSERT(nbytes);
	KASSERT(nbytes % 16 == 0);

	fpu_kern_enter();
	aesarmv8_cbcmac_update1(enc, in, nbytes, auth, nrounds);
	fpu_kern_leave();
}

static void
aesarmv8_ccm_enc1_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t authctr[static 32],
    uint32_t nrounds)
{

	KASSERT(nbytes);
	KASSERT(nbytes % 16 == 0);

	fpu_kern_enter();
	aesarmv8_ccm_enc1(enc, in, out, nbytes, authctr, nrounds);
	fpu_kern_leave();
}

static void
aesarmv8_ccm_dec1_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t authctr[static 32],
    uint32_t nrounds)
{

	KASSERT(nbytes);
	KASSERT(nbytes % 16 == 0);

	fpu_kern_enter();
	aesarmv8_ccm_dec1(enc, in, out, nbytes, authctr, nrounds);
	fpu_kern_leave();
}

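/*
 * Self-test for the XTS tweak update, i.e. multiplication by x in
 * GF(2^128) with reduction polynomial x^128 + x^7 + x^2 + x + 1 and
 * little-endian byte order: a 0x80 in one byte must carry a 1 into the
 * next byte, and a carry out of byte 15 must fold back as 0x87 into
 * byte 0.  The cases exercise carries across word boundaries and the
 * final reduction.
 */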
static int
aesarmv8_xts_update_selftest(void)
{
	static const struct {
		uint8_t in[16], out[16];
	} cases[] = {
		{{1}, {2}},
		{{0,0,0,0x80}, {0,0,0,0,1}},
		{{0,0,0,0,0,0,0,0x80}, {0,0,0,0,0,0,0,0,1}},
		{{0,0,0,0x80,0,0,0,0x80}, {0,0,0,0,1,0,0,0,1}},
		{{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0x80}, {0x87}},
		{{0,0,0,0,0,0,0,0x80,0,0,0,0,0,0,0,0x80},
		 {0x87,0,0,0,0,0,0,0,1}},
		{{0,0,0,0x80,0,0,0,0,0,0,0,0,0,0,0,0x80}, {0x87,0,0,0,1}},
		{{0,0,0,0x80,0,0,0,0x80,0,0,0,0,0,0,0,0x80},
		 {0x87,0,0,0,1,0,0,0,1}},
	};
	unsigned i;
	uint8_t tweak[16];

	for (i = 0; i < sizeof(cases)/sizeof(cases[0]); i++) {
		aesarmv8_xts_update(cases[i].in, tweak);
		if (memcmp(tweak, cases[i].out, 16))
			return -1;
	}

	/* Success! */
	return 0;
}

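/*
 * Probe for ARMv8.0-AES support: check the AES field of
 * ID_AA64ISAR0_EL1 (via curcpu() in the kernel, via the
 * machdep.cpu0.cpu_id sysctl node in userland) and run the tweak
 * self-test with the FPU enabled.
 */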
static int
aesarmv8_probe(void)
{
	struct aarch64_sysctl_cpu_id *id;
	int result = 0;

	/* Verify that the CPU supports AES. */
#ifdef _KERNEL
	id = &curcpu()->ci_id;
#else
	struct aarch64_sysctl_cpu_id ids;
	size_t idlen;
	id = &ids;
	idlen = sizeof ids;
	if (sysctlbyname("machdep.cpu0.cpu_id", id, &idlen, NULL, 0))
		return -1;
	if (idlen != sizeof ids)
		return -1;
#endif
	switch (__SHIFTOUT(id->ac_aa64isar0, ID_AA64ISAR0_EL1_AES)) {
	case ID_AA64ISAR0_EL1_AES_AES:
	case ID_AA64ISAR0_EL1_AES_PMUL:
		break;
	default:
		return -1;
	}

	fpu_kern_enter();

	/* Verify that our XTS tweak update logic works. */
	if (aesarmv8_xts_update_selftest())
		result = -1;

	fpu_kern_leave();

	return result;
}

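/*
 * Method table plugging these routines into the machine-independent
 * AES dispatch; machine-dependent startup code is expected to register
 * it, e.g. via aes_md_init(), after which the MI code uses ai_probe to
 * decide whether to select it.
 */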
struct aes_impl aes_armv8_impl = {
	.ai_name = "ARMv8.0-AES",
	.ai_probe = aesarmv8_probe,
	.ai_setenckey = aesarmv8_setenckey_impl,
	.ai_setdeckey = aesarmv8_setdeckey_impl,
	.ai_enc = aesarmv8_enc_impl,
	.ai_dec = aesarmv8_dec_impl,
	.ai_cbc_enc = aesarmv8_cbc_enc_impl,
	.ai_cbc_dec = aesarmv8_cbc_dec_impl,
	.ai_xts_enc = aesarmv8_xts_enc_impl,
	.ai_xts_dec = aesarmv8_xts_dec_impl,
	.ai_cbcmac_update1 = aesarmv8_cbcmac_update1_impl,
	.ai_ccm_enc1 = aesarmv8_ccm_enc1_impl,
	.ai_ccm_dec1 = aesarmv8_ccm_dec1_impl,
};