xref: /netbsd-src/sys/crypto/aes/arch/x86/aes_sse2_impl.c (revision 081e09b4c188de5344b3e10b5a19d9803198dcfb)
1 /*	$NetBSD: aes_sse2_impl.c,v 1.5 2020/07/25 22:29:56 riastradh Exp $	*/
2 
3 /*-
4  * Copyright (c) 2020 The NetBSD Foundation, Inc.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice, this list of conditions and the following disclaimer.
12  * 2. Redistributions in binary form must reproduce the above copyright
13  *    notice, this list of conditions and the following disclaimer in the
14  *    documentation and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
17  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
18  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
20  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26  * POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 #include <sys/cdefs.h>
30 __KERNEL_RCSID(1, "$NetBSD: aes_sse2_impl.c,v 1.5 2020/07/25 22:29:56 riastradh Exp $");
31 
32 #include <sys/types.h>
33 #include <sys/endian.h>
34 
35 #include <crypto/aes/aes.h>
36 #include <crypto/aes/aes_impl.h>
37 #include <crypto/aes/arch/x86/aes_sse2.h>
38 
39 #ifdef _KERNEL
40 #include <x86/cpu.h>
41 #include <x86/cpuvar.h>
42 #include <x86/fpu.h>
43 #include <x86/specialreg.h>
44 #else
45 #include <cpuid.h>
46 #define	fpu_kern_enter()	((void)0)
47 #define	fpu_kern_leave()	((void)0)
48 #endif
49 
/*
 * Expand the AES key into the encryption key schedule enc->aese_aes.
 * Runs inside an FPU section because aes_sse2_setkey uses the SSE
 * register file (fpu_kern_enter/leave are no-ops in userland builds,
 * per the #else stubs above).
 */
static void
aes_sse2_setenckey_impl(struct aesenc *enc, const uint8_t *key,
    uint32_t nrounds)
{

	fpu_kern_enter();
	aes_sse2_setkey(enc->aese_aes.aes_rk64, key, nrounds);
	fpu_kern_leave();
}
59 
/*
 * Expand the AES key into the decryption key schedule dec->aesd_aes.
 * Note this calls the same aes_sse2_setkey as the encryption path;
 * see the comment below for why no separate schedule is needed.
 */
static void
aes_sse2_setdeckey_impl(struct aesdec *dec, const uint8_t *key,
    uint32_t nrounds)
{

	fpu_kern_enter();
	/*
	 * BearSSL computes InvMixColumns on the fly -- no need for
	 * distinct decryption round keys.
	 */
	aes_sse2_setkey(dec->aesd_aes.aes_rk64, key, nrounds);
	fpu_kern_leave();
}
73 
/*
 * Encrypt one 16-byte block, in -> out, with the expanded key in enc.
 * The FPU section brackets the SSE2 bitsliced computation.
 */
static void
aes_sse2_enc_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], uint32_t nrounds)
{

	fpu_kern_enter();
	aes_sse2_enc(enc, in, out, nrounds);
	fpu_kern_leave();
}
83 
/*
 * Decrypt one 16-byte block, in -> out, with the expanded key in dec.
 * The FPU section brackets the SSE2 bitsliced computation.
 */
static void
aes_sse2_dec_impl(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], uint32_t nrounds)
{

	fpu_kern_enter();
	aes_sse2_dec(dec, in, out, nrounds);
	fpu_kern_leave();
}
93 
/*
 * CBC-encrypt nbytes from in to out, updating the IV buffer in place.
 * An empty buffer is a no-op, so we skip the FPU section entirely in
 * that case.
 */
static void
aes_sse2_cbc_enc_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t iv[static 16],
    uint32_t nrounds)
{

	if (nbytes != 0) {
		fpu_kern_enter();
		aes_sse2_cbc_enc(enc, in, out, nbytes, iv, nrounds);
		fpu_kern_leave();
	}
}
106 
/*
 * CBC-decrypt nbytes from in to out, updating the IV buffer in place.
 * An empty buffer is a no-op, so we skip the FPU section entirely in
 * that case.
 */
static void
aes_sse2_cbc_dec_impl(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t iv[static 16],
    uint32_t nrounds)
{

	if (nbytes != 0) {
		fpu_kern_enter();
		aes_sse2_cbc_dec(dec, in, out, nbytes, iv, nrounds);
		fpu_kern_leave();
	}
}
119 
/*
 * XTS-encrypt nbytes from in to out, updating the tweak buffer in
 * place.  An empty buffer is a no-op, so we skip the FPU section
 * entirely in that case.
 */
static void
aes_sse2_xts_enc_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t tweak[static 16],
    uint32_t nrounds)
{

	if (nbytes != 0) {
		fpu_kern_enter();
		aes_sse2_xts_enc(enc, in, out, nbytes, tweak, nrounds);
		fpu_kern_leave();
	}
}
132 
/*
 * XTS-decrypt nbytes from in to out, updating the tweak buffer in
 * place.  An empty buffer is a no-op, so we skip the FPU section
 * entirely in that case.
 */
static void
aes_sse2_xts_dec_impl(const struct aesdec *dec, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t tweak[static 16],
    uint32_t nrounds)
{

	if (nbytes != 0) {
		fpu_kern_enter();
		aes_sse2_xts_dec(dec, in, out, nbytes, tweak, nrounds);
		fpu_kern_leave();
	}
}
145 
/*
 * Absorb nbytes of input into the 16-byte CBC-MAC state in auth.
 * nbytes is presumably a positive multiple of 16, as with the other
 * bulk routines here -- TODO confirm against the aes_impl contract.
 */
static void
aes_sse2_cbcmac_update1_impl(const struct aesenc *enc,
    const uint8_t in[static 16], size_t nbytes, uint8_t auth[static 16],
    uint32_t nrounds)
{

	/*
	 * Empty input leaves the MAC state unchanged; bail out early
	 * for consistency with the CBC/XTS wrappers and to avoid an
	 * unnecessary FPU section.
	 */
	if (nbytes == 0)
		return;
	fpu_kern_enter();
	aes_sse2_cbcmac_update1(enc, in, nbytes, auth, nrounds);
	fpu_kern_leave();
}
156 
/*
 * CCM encryption step: process nbytes from in to out, updating the
 * 32-byte authctr buffer (presumably 16 bytes of authenticator state
 * followed by a 16-byte counter block, per the name -- verify against
 * aes_sse2_ccm_enc1).
 */
static void
aes_sse2_ccm_enc1_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t authctr[static 32],
    uint32_t nrounds)
{

	/*
	 * Nothing to encrypt or authenticate for an empty buffer;
	 * bail out early for consistency with the CBC/XTS wrappers
	 * and to avoid an unnecessary FPU section.
	 */
	if (nbytes == 0)
		return;
	fpu_kern_enter();
	aes_sse2_ccm_enc1(enc, in, out, nbytes, authctr, nrounds);
	fpu_kern_leave();
}
167 
/*
 * CCM decryption step: process nbytes from in to out, updating the
 * 32-byte authctr buffer (presumably authenticator state plus counter
 * block, per the name -- verify against aes_sse2_ccm_dec1).  Note CCM
 * uses the *encryption* key schedule for both directions.
 */
static void
aes_sse2_ccm_dec1_impl(const struct aesenc *enc, const uint8_t in[static 16],
    uint8_t out[static 16], size_t nbytes, uint8_t authctr[static 32],
    uint32_t nrounds)
{

	/*
	 * Nothing to decrypt or authenticate for an empty buffer;
	 * bail out early for consistency with the CBC/XTS wrappers
	 * and to avoid an unnecessary FPU section.
	 */
	if (nbytes == 0)
		return;
	fpu_kern_enter();
	aes_sse2_ccm_dec1(enc, in, out, nbytes, authctr, nrounds);
	fpu_kern_leave();
}
178 
/*
 * Decide whether this implementation is usable on the current CPU:
 * require SSE and SSE2, then run the self-test under the FPU.
 * Returns 0 on success, nonzero (here -1, or the self-test result)
 * on failure.
 */
static int
aes_sse2_probe(void)
{
	int result;

#ifdef _KERNEL
	if (!i386_has_sse || !i386_has_sse2)
		return -1;
#else
	unsigned eax, ebx, ecx, edx;
	const unsigned need = bit_SSE | bit_SSE2;

	if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
		return -1;
	if ((edx & need) != need)
		return -1;
#endif

	fpu_kern_enter();
	result = aes_sse2_selftest();
	fpu_kern_leave();

	return result;
}
206 
/*
 * Operations table exported to the AES implementation framework.
 * Every hook wraps the corresponding aes_sse2_* routine in an FPU
 * section; ai_probe checks for SSE/SSE2 and runs the self-test
 * before this implementation may be selected.
 */
struct aes_impl aes_sse2_impl = {
	.ai_name = "Intel SSE2 bitsliced",
	.ai_probe = aes_sse2_probe,
	.ai_setenckey = aes_sse2_setenckey_impl,
	.ai_setdeckey = aes_sse2_setdeckey_impl,
	.ai_enc = aes_sse2_enc_impl,
	.ai_dec = aes_sse2_dec_impl,
	.ai_cbc_enc = aes_sse2_cbc_enc_impl,
	.ai_cbc_dec = aes_sse2_cbc_dec_impl,
	.ai_xts_enc = aes_sse2_xts_enc_impl,
	.ai_xts_dec = aes_sse2_xts_dec_impl,
	.ai_cbcmac_update1 = aes_sse2_cbcmac_update1_impl,
	.ai_ccm_enc1 = aes_sse2_ccm_enc1_impl,
	.ai_ccm_dec1 = aes_sse2_ccm_dec1_impl,
};
222