/* crypto/sha/sha_locl.h */
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */

#include <stdlib.h>
#include <string.h>

#include <openssl/opensslconf.h>
#include <openssl/sha.h>

#ifndef SHA_LONG_LOG2
#define SHA_LONG_LOG2	2	/* default to 32 bits */
#endif

#define DATA_ORDER_IS_BIG_ENDIAN

#define HASH_LONG               SHA_LONG
#define HASH_LONG_LOG2          SHA_LONG_LOG2
#define HASH_CTX                SHA_CTX
#define HASH_CBLOCK             SHA_CBLOCK
#define HASH_LBLOCK             SHA_LBLOCK
#define HASH_MAKE_STRING(c,s)   do {	\
	unsigned long ll;		\
	ll=(c)->h0; HOST_l2c(ll,(s));	\
	ll=(c)->h1; HOST_l2c(ll,(s));	\
	ll=(c)->h2; HOST_l2c(ll,(s));	\
	ll=(c)->h3; HOST_l2c(ll,(s));	\
	ll=(c)->h4; HOST_l2c(ll,(s));	\
	} while (0)
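/*
 * HASH_MAKE_STRING is invoked by HASH_FINAL (see md32_common.h) to
 * serialise the five 32-bit chaining values h0..h4 into the 20-byte
 * digest via HOST_l2c, most significant byte first, since
 * DATA_ORDER_IS_BIG_ENDIAN is defined above.
 */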

#if defined(SHA_0)

# define HASH_UPDATE             	SHA_Update
# define HASH_TRANSFORM          	SHA_Transform
# define HASH_FINAL              	SHA_Final
# define HASH_INIT			SHA_Init
# define HASH_BLOCK_HOST_ORDER   	sha_block_host_order
# define HASH_BLOCK_DATA_ORDER   	sha_block_data_order
# define Xupdate(a,ix,ia,ib,ic,id)	(ix=(a)=(ia^ib^ic^id))
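/*
 * The SHA-0 Xupdate above expands the message schedule as a plain XOR
 * of the four earlier words, with no rotation; compare the SHA-1
 * variant below, which adds the one-bit left rotate introduced in
 * FIPS 180-1.
 */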

  void sha_block_host_order (SHA_CTX *c, const void *p,size_t num);
  void sha_block_data_order (SHA_CTX *c, const void *p,size_t num);

#elif defined(SHA_1)

# define HASH_UPDATE             	SHA1_Update
# define HASH_TRANSFORM          	SHA1_Transform
# define HASH_FINAL              	SHA1_Final
# define HASH_INIT			SHA1_Init
# define HASH_BLOCK_HOST_ORDER   	sha1_block_host_order
# define HASH_BLOCK_DATA_ORDER   	sha1_block_data_order
# if defined(__MWERKS__) && defined(__MC68K__)
   /* Metrowerks for Motorola fails otherwise:-( <appro@fy.chalmers.se> */
#  define Xupdate(a,ix,ia,ib,ic,id)	do { (a)=(ia^ib^ic^id);		\
					     ix=(a)=ROTATE((a),1);	\
					} while (0)
# else
#  define Xupdate(a,ix,ia,ib,ic,id)	( (a)=(ia^ib^ic^id),	\
					  ix=(a)=ROTATE((a),1)	\
					)
# endif
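/*
 * The SHA-1 Xupdate above computes ROTATE(ia^ib^ic^id, 1) and stores
 * the result both in the temporary (a) and in the schedule slot ix;
 * that single-bit rotation is the only difference between the SHA-0
 * and SHA-1 compression functions.
 */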

# ifdef SHA1_ASM
#  if defined(__i386) || defined(__i386__) || defined(_M_IX86) || defined(__INTEL__)
#   if !defined(B_ENDIAN)
#    define sha1_block_host_order		sha1_block_asm_host_order
#    define DONT_IMPLEMENT_BLOCK_HOST_ORDER
#    define sha1_block_data_order		sha1_block_asm_data_order
#    define DONT_IMPLEMENT_BLOCK_DATA_ORDER
#    define HASH_BLOCK_DATA_ORDER_ALIGNED	sha1_block_asm_data_order
#   endif
#  elif defined(__ia64) || defined(__ia64__) || defined(_M_IA64)
#   define sha1_block_host_order		sha1_block_asm_host_order
#   define DONT_IMPLEMENT_BLOCK_HOST_ORDER
#   define sha1_block_data_order		sha1_block_asm_data_order
#   define DONT_IMPLEMENT_BLOCK_DATA_ORDER
#  endif
# endif
  void sha1_block_host_order (SHA_CTX *c, const void *p,size_t num);
  void sha1_block_data_order (SHA_CTX *c, const void *p,size_t num);

#else
# error "Either SHA_0 or SHA_1 must be defined."
#endif

#include "md32_common.h"

#define INIT_DATA_h0 0x67452301UL
#define INIT_DATA_h1 0xefcdab89UL
#define INIT_DATA_h2 0x98badcfeUL
#define INIT_DATA_h3 0x10325476UL
#define INIT_DATA_h4 0xc3d2e1f0UL
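/*
 * The INIT_DATA_* values above are the standard initial chaining
 * values from FIPS 180-1 (identical for SHA-0 and SHA-1); HASH_INIT
 * below simply loads them and resets the bit-length counters (Nl, Nh)
 * and the buffered byte count (num).
 */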

int HASH_INIT (SHA_CTX *c)
	{
	c->h0=INIT_DATA_h0;
	c->h1=INIT_DATA_h1;
	c->h2=INIT_DATA_h2;
	c->h3=INIT_DATA_h3;
	c->h4=INIT_DATA_h4;
	c->Nl=0;
	c->Nh=0;
	c->num=0;
	return 1;
	}
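/*
 * For reference, a minimal sketch of how the public entry points built
 * from this file are normally driven (shown for the SHA_1 build; the
 * buffer and length names are illustrative only):
 *
 *	SHA_CTX ctx;
 *	unsigned char md[SHA_DIGEST_LENGTH];
 *
 *	SHA1_Init(&ctx);
 *	SHA1_Update(&ctx, buf, buflen);
 *	SHA1_Final(md, &ctx);
 */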

#define K_00_19	0x5a827999UL
#define K_20_39 0x6ed9eba1UL
#define K_40_59 0x8f1bbcdcUL
#define K_60_79 0xca62c1d6UL

/* As pointed out by Wei Dai <weidai@eskimo.com>, F() below can be
 * simplified to the code in F_00_19.  Wei attributes these optimisations
 * to Peter Gutmann's SHS code, and he attributes it to Rich Schroeppel.
 * #define F(x,y,z) (((x) & (y))  |  ((~(x)) & (z)))
 * I've just become aware of another tweak to be made, again from Wei Dai,
 * in F_40_59, (x&a)|(y&a) -> (x|y)&a
 */
#define	F_00_19(b,c,d)	((((c) ^ (d)) & (b)) ^ (d))
#define	F_20_39(b,c,d)	((b) ^ (c) ^ (d))
#define F_40_59(b,c,d)	(((b) & (c)) | (((b)|(c)) & (d)))
#define	F_60_79(b,c,d)	F_20_39(b,c,d)
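/*
 * For reference, F_00_19 above is an algebraic rewrite of the textbook
 * choice function Ch(b,c,d) = (b & c) | (~b & d): ((c ^ d) & b) ^ d
 * selects c where a bit of b is 1 and d where it is 0, using one fewer
 * operation.  Likewise F_40_59 equals the majority function
 * Maj(b,c,d) = (b & c) | (b & d) | (c & d).
 */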

#ifndef OPENSSL_SMALL_FOOTPRINT

#define BODY_00_15(i,a,b,c,d,e,f,xi) \
	(f)=xi+(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \
	(b)=ROTATE((b),30);

#define BODY_16_19(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \
	Xupdate(f,xi,xa,xb,xc,xd); \
	(f)+=(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \
	(b)=ROTATE((b),30);

#define BODY_20_31(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \
	Xupdate(f,xi,xa,xb,xc,xd); \
	(f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \
	(b)=ROTATE((b),30);

#define BODY_32_39(i,a,b,c,d,e,f,xa,xb,xc,xd) \
	Xupdate(f,xa,xa,xb,xc,xd); \
	(f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \
	(b)=ROTATE((b),30);

#define BODY_40_59(i,a,b,c,d,e,f,xa,xb,xc,xd) \
	Xupdate(f,xa,xa,xb,xc,xd); \
	(f)+=(e)+K_40_59+ROTATE((a),5)+F_40_59((b),(c),(d)); \
	(b)=ROTATE((b),30);

#define BODY_60_79(i,a,b,c,d,e,f,xa,xb,xc,xd) \
	Xupdate(f,xa,xa,xb,xc,xd); \
	(f)=xa+(e)+K_60_79+ROTATE((a),5)+F_60_79((b),(c),(d)); \
	(b)=ROTATE((b),30);
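/*
 * In the fully unrolled compression loops below the working variables
 * are never shifted along after a round; instead the roles
 * a,b,c,d,e,f are rotated through the argument lists of successive
 * BODY_* invocations, so each macro only writes the new word into (f)
 * and rotates (b) in place.
 */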

#ifdef X
#undef X
#endif
#ifndef MD32_XARRAY
  /*
   * Originally X was an array. As it's automatic it's natural
   * to expect a RISC compiler to accommodate at least part of it in
   * the register bank, isn't it? Unfortunately not all compilers
   * "find" this expectation reasonable:-( In order to make such
   * compilers generate better code I replace X[] with a bunch of
   * X0, X1, etc. See the function body below...
   *					<appro@fy.chalmers.se>
   */
# define X(i)	XX##i
#else
  /*
   * However! Some compilers (most notably HP C) get overwhelmed by
   * that many local variables, so we have to have a way to fall back
   * to the original behavior.
   */
# define X(i)	XX[i]
#endif

#ifndef DONT_IMPLEMENT_BLOCK_HOST_ORDER
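/*
 * The "host order" variant takes its input as SHA_LBLOCK 32-bit words
 * already in host byte order, so the message words are read straight
 * from W[] without conversion; num counts SHA_CBLOCK-byte blocks.
 */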
void HASH_BLOCK_HOST_ORDER (SHA_CTX *c, const void *d, size_t num)
	{
	const SHA_LONG *W=d;
	register unsigned MD32_REG_T A,B,C,D,E,T;
#ifndef MD32_XARRAY
	unsigned MD32_REG_T	XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7,
				XX8, XX9,XX10,XX11,XX12,XX13,XX14,XX15;
#else
	SHA_LONG	XX[16];
#endif

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

	for (;;)
		{
	BODY_00_15( 0,A,B,C,D,E,T,W[ 0]);
	BODY_00_15( 1,T,A,B,C,D,E,W[ 1]);
	BODY_00_15( 2,E,T,A,B,C,D,W[ 2]);
	BODY_00_15( 3,D,E,T,A,B,C,W[ 3]);
	BODY_00_15( 4,C,D,E,T,A,B,W[ 4]);
	BODY_00_15( 5,B,C,D,E,T,A,W[ 5]);
	BODY_00_15( 6,A,B,C,D,E,T,W[ 6]);
	BODY_00_15( 7,T,A,B,C,D,E,W[ 7]);
	BODY_00_15( 8,E,T,A,B,C,D,W[ 8]);
	BODY_00_15( 9,D,E,T,A,B,C,W[ 9]);
	BODY_00_15(10,C,D,E,T,A,B,W[10]);
	BODY_00_15(11,B,C,D,E,T,A,W[11]);
	BODY_00_15(12,A,B,C,D,E,T,W[12]);
	BODY_00_15(13,T,A,B,C,D,E,W[13]);
	BODY_00_15(14,E,T,A,B,C,D,W[14]);
	BODY_00_15(15,D,E,T,A,B,C,W[15]);

	BODY_16_19(16,C,D,E,T,A,B,X( 0),W[ 0],W[ 2],W[ 8],W[13]);
	BODY_16_19(17,B,C,D,E,T,A,X( 1),W[ 1],W[ 3],W[ 9],W[14]);
	BODY_16_19(18,A,B,C,D,E,T,X( 2),W[ 2],W[ 4],W[10],W[15]);
	BODY_16_19(19,T,A,B,C,D,E,X( 3),W[ 3],W[ 5],W[11],X( 0));

	BODY_20_31(20,E,T,A,B,C,D,X( 4),W[ 4],W[ 6],W[12],X( 1));
	BODY_20_31(21,D,E,T,A,B,C,X( 5),W[ 5],W[ 7],W[13],X( 2));
	BODY_20_31(22,C,D,E,T,A,B,X( 6),W[ 6],W[ 8],W[14],X( 3));
	BODY_20_31(23,B,C,D,E,T,A,X( 7),W[ 7],W[ 9],W[15],X( 4));
	BODY_20_31(24,A,B,C,D,E,T,X( 8),W[ 8],W[10],X( 0),X( 5));
	BODY_20_31(25,T,A,B,C,D,E,X( 9),W[ 9],W[11],X( 1),X( 6));
	BODY_20_31(26,E,T,A,B,C,D,X(10),W[10],W[12],X( 2),X( 7));
	BODY_20_31(27,D,E,T,A,B,C,X(11),W[11],W[13],X( 3),X( 8));
	BODY_20_31(28,C,D,E,T,A,B,X(12),W[12],W[14],X( 4),X( 9));
	BODY_20_31(29,B,C,D,E,T,A,X(13),W[13],W[15],X( 5),X(10));
	BODY_20_31(30,A,B,C,D,E,T,X(14),W[14],X( 0),X( 6),X(11));
	BODY_20_31(31,T,A,B,C,D,E,X(15),W[15],X( 1),X( 7),X(12));

	BODY_32_39(32,E,T,A,B,C,D,X( 0),X( 2),X( 8),X(13));
	BODY_32_39(33,D,E,T,A,B,C,X( 1),X( 3),X( 9),X(14));
	BODY_32_39(34,C,D,E,T,A,B,X( 2),X( 4),X(10),X(15));
	BODY_32_39(35,B,C,D,E,T,A,X( 3),X( 5),X(11),X( 0));
	BODY_32_39(36,A,B,C,D,E,T,X( 4),X( 6),X(12),X( 1));
	BODY_32_39(37,T,A,B,C,D,E,X( 5),X( 7),X(13),X( 2));
	BODY_32_39(38,E,T,A,B,C,D,X( 6),X( 8),X(14),X( 3));
	BODY_32_39(39,D,E,T,A,B,C,X( 7),X( 9),X(15),X( 4));

	BODY_40_59(40,C,D,E,T,A,B,X( 8),X(10),X( 0),X( 5));
	BODY_40_59(41,B,C,D,E,T,A,X( 9),X(11),X( 1),X( 6));
	BODY_40_59(42,A,B,C,D,E,T,X(10),X(12),X( 2),X( 7));
	BODY_40_59(43,T,A,B,C,D,E,X(11),X(13),X( 3),X( 8));
	BODY_40_59(44,E,T,A,B,C,D,X(12),X(14),X( 4),X( 9));
	BODY_40_59(45,D,E,T,A,B,C,X(13),X(15),X( 5),X(10));
	BODY_40_59(46,C,D,E,T,A,B,X(14),X( 0),X( 6),X(11));
	BODY_40_59(47,B,C,D,E,T,A,X(15),X( 1),X( 7),X(12));
	BODY_40_59(48,A,B,C,D,E,T,X( 0),X( 2),X( 8),X(13));
	BODY_40_59(49,T,A,B,C,D,E,X( 1),X( 3),X( 9),X(14));
	BODY_40_59(50,E,T,A,B,C,D,X( 2),X( 4),X(10),X(15));
	BODY_40_59(51,D,E,T,A,B,C,X( 3),X( 5),X(11),X( 0));
	BODY_40_59(52,C,D,E,T,A,B,X( 4),X( 6),X(12),X( 1));
	BODY_40_59(53,B,C,D,E,T,A,X( 5),X( 7),X(13),X( 2));
	BODY_40_59(54,A,B,C,D,E,T,X( 6),X( 8),X(14),X( 3));
	BODY_40_59(55,T,A,B,C,D,E,X( 7),X( 9),X(15),X( 4));
	BODY_40_59(56,E,T,A,B,C,D,X( 8),X(10),X( 0),X( 5));
	BODY_40_59(57,D,E,T,A,B,C,X( 9),X(11),X( 1),X( 6));
	BODY_40_59(58,C,D,E,T,A,B,X(10),X(12),X( 2),X( 7));
	BODY_40_59(59,B,C,D,E,T,A,X(11),X(13),X( 3),X( 8));

	BODY_60_79(60,A,B,C,D,E,T,X(12),X(14),X( 4),X( 9));
	BODY_60_79(61,T,A,B,C,D,E,X(13),X(15),X( 5),X(10));
	BODY_60_79(62,E,T,A,B,C,D,X(14),X( 0),X( 6),X(11));
	BODY_60_79(63,D,E,T,A,B,C,X(15),X( 1),X( 7),X(12));
	BODY_60_79(64,C,D,E,T,A,B,X( 0),X( 2),X( 8),X(13));
	BODY_60_79(65,B,C,D,E,T,A,X( 1),X( 3),X( 9),X(14));
	BODY_60_79(66,A,B,C,D,E,T,X( 2),X( 4),X(10),X(15));
	BODY_60_79(67,T,A,B,C,D,E,X( 3),X( 5),X(11),X( 0));
	BODY_60_79(68,E,T,A,B,C,D,X( 4),X( 6),X(12),X( 1));
	BODY_60_79(69,D,E,T,A,B,C,X( 5),X( 7),X(13),X( 2));
	BODY_60_79(70,C,D,E,T,A,B,X( 6),X( 8),X(14),X( 3));
	BODY_60_79(71,B,C,D,E,T,A,X( 7),X( 9),X(15),X( 4));
	BODY_60_79(72,A,B,C,D,E,T,X( 8),X(10),X( 0),X( 5));
	BODY_60_79(73,T,A,B,C,D,E,X( 9),X(11),X( 1),X( 6));
	BODY_60_79(74,E,T,A,B,C,D,X(10),X(12),X( 2),X( 7));
	BODY_60_79(75,D,E,T,A,B,C,X(11),X(13),X( 3),X( 8));
	BODY_60_79(76,C,D,E,T,A,B,X(12),X(14),X( 4),X( 9));
	BODY_60_79(77,B,C,D,E,T,A,X(13),X(15),X( 5),X(10));
	BODY_60_79(78,A,B,C,D,E,T,X(14),X( 0),X( 6),X(11));
	BODY_60_79(79,T,A,B,C,D,E,X(15),X( 1),X( 7),X(12));

	c->h0=(c->h0+E)&0xffffffffL;
	c->h1=(c->h1+T)&0xffffffffL;
	c->h2=(c->h2+A)&0xffffffffL;
	c->h3=(c->h3+B)&0xffffffffL;
	c->h4=(c->h4+C)&0xffffffffL;

	if (--num == 0) break;

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

	W+=SHA_LBLOCK;
		}
	}
#endif

#ifndef DONT_IMPLEMENT_BLOCK_DATA_ORDER
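/*
 * The "data order" variant reads the block as a big-endian byte
 * stream, converting each 32-bit word with HOST_c2l.  The loads of
 * words 2..15 are interleaved with rounds 0..13 below, presumably so
 * the conversions overlap the round computation.
 */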
void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num)
	{
	const unsigned char *data=p;
	register unsigned MD32_REG_T A,B,C,D,E,T,l;
#ifndef MD32_XARRAY
	unsigned MD32_REG_T	XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7,
				XX8, XX9,XX10,XX11,XX12,XX13,XX14,XX15;
#else
	SHA_LONG	XX[16];
#endif

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

	for (;;)
		{

	HOST_c2l(data,l); X( 0)=l;		HOST_c2l(data,l); X( 1)=l;
	BODY_00_15( 0,A,B,C,D,E,T,X( 0));	HOST_c2l(data,l); X( 2)=l;
	BODY_00_15( 1,T,A,B,C,D,E,X( 1));	HOST_c2l(data,l); X( 3)=l;
	BODY_00_15( 2,E,T,A,B,C,D,X( 2));	HOST_c2l(data,l); X( 4)=l;
	BODY_00_15( 3,D,E,T,A,B,C,X( 3));	HOST_c2l(data,l); X( 5)=l;
	BODY_00_15( 4,C,D,E,T,A,B,X( 4));	HOST_c2l(data,l); X( 6)=l;
	BODY_00_15( 5,B,C,D,E,T,A,X( 5));	HOST_c2l(data,l); X( 7)=l;
	BODY_00_15( 6,A,B,C,D,E,T,X( 6));	HOST_c2l(data,l); X( 8)=l;
	BODY_00_15( 7,T,A,B,C,D,E,X( 7));	HOST_c2l(data,l); X( 9)=l;
	BODY_00_15( 8,E,T,A,B,C,D,X( 8));	HOST_c2l(data,l); X(10)=l;
	BODY_00_15( 9,D,E,T,A,B,C,X( 9));	HOST_c2l(data,l); X(11)=l;
	BODY_00_15(10,C,D,E,T,A,B,X(10));	HOST_c2l(data,l); X(12)=l;
	BODY_00_15(11,B,C,D,E,T,A,X(11));	HOST_c2l(data,l); X(13)=l;
	BODY_00_15(12,A,B,C,D,E,T,X(12));	HOST_c2l(data,l); X(14)=l;
	BODY_00_15(13,T,A,B,C,D,E,X(13));	HOST_c2l(data,l); X(15)=l;
	BODY_00_15(14,E,T,A,B,C,D,X(14));
	BODY_00_15(15,D,E,T,A,B,C,X(15));

	BODY_16_19(16,C,D,E,T,A,B,X( 0),X( 0),X( 2),X( 8),X(13));
	BODY_16_19(17,B,C,D,E,T,A,X( 1),X( 1),X( 3),X( 9),X(14));
	BODY_16_19(18,A,B,C,D,E,T,X( 2),X( 2),X( 4),X(10),X(15));
	BODY_16_19(19,T,A,B,C,D,E,X( 3),X( 3),X( 5),X(11),X( 0));

	BODY_20_31(20,E,T,A,B,C,D,X( 4),X( 4),X( 6),X(12),X( 1));
	BODY_20_31(21,D,E,T,A,B,C,X( 5),X( 5),X( 7),X(13),X( 2));
	BODY_20_31(22,C,D,E,T,A,B,X( 6),X( 6),X( 8),X(14),X( 3));
	BODY_20_31(23,B,C,D,E,T,A,X( 7),X( 7),X( 9),X(15),X( 4));
	BODY_20_31(24,A,B,C,D,E,T,X( 8),X( 8),X(10),X( 0),X( 5));
	BODY_20_31(25,T,A,B,C,D,E,X( 9),X( 9),X(11),X( 1),X( 6));
	BODY_20_31(26,E,T,A,B,C,D,X(10),X(10),X(12),X( 2),X( 7));
	BODY_20_31(27,D,E,T,A,B,C,X(11),X(11),X(13),X( 3),X( 8));
	BODY_20_31(28,C,D,E,T,A,B,X(12),X(12),X(14),X( 4),X( 9));
	BODY_20_31(29,B,C,D,E,T,A,X(13),X(13),X(15),X( 5),X(10));
	BODY_20_31(30,A,B,C,D,E,T,X(14),X(14),X( 0),X( 6),X(11));
	BODY_20_31(31,T,A,B,C,D,E,X(15),X(15),X( 1),X( 7),X(12));

	BODY_32_39(32,E,T,A,B,C,D,X( 0),X( 2),X( 8),X(13));
	BODY_32_39(33,D,E,T,A,B,C,X( 1),X( 3),X( 9),X(14));
	BODY_32_39(34,C,D,E,T,A,B,X( 2),X( 4),X(10),X(15));
	BODY_32_39(35,B,C,D,E,T,A,X( 3),X( 5),X(11),X( 0));
	BODY_32_39(36,A,B,C,D,E,T,X( 4),X( 6),X(12),X( 1));
	BODY_32_39(37,T,A,B,C,D,E,X( 5),X( 7),X(13),X( 2));
	BODY_32_39(38,E,T,A,B,C,D,X( 6),X( 8),X(14),X( 3));
	BODY_32_39(39,D,E,T,A,B,C,X( 7),X( 9),X(15),X( 4));

	BODY_40_59(40,C,D,E,T,A,B,X( 8),X(10),X( 0),X( 5));
	BODY_40_59(41,B,C,D,E,T,A,X( 9),X(11),X( 1),X( 6));
	BODY_40_59(42,A,B,C,D,E,T,X(10),X(12),X( 2),X( 7));
	BODY_40_59(43,T,A,B,C,D,E,X(11),X(13),X( 3),X( 8));
	BODY_40_59(44,E,T,A,B,C,D,X(12),X(14),X( 4),X( 9));
	BODY_40_59(45,D,E,T,A,B,C,X(13),X(15),X( 5),X(10));
	BODY_40_59(46,C,D,E,T,A,B,X(14),X( 0),X( 6),X(11));
	BODY_40_59(47,B,C,D,E,T,A,X(15),X( 1),X( 7),X(12));
	BODY_40_59(48,A,B,C,D,E,T,X( 0),X( 2),X( 8),X(13));
	BODY_40_59(49,T,A,B,C,D,E,X( 1),X( 3),X( 9),X(14));
	BODY_40_59(50,E,T,A,B,C,D,X( 2),X( 4),X(10),X(15));
	BODY_40_59(51,D,E,T,A,B,C,X( 3),X( 5),X(11),X( 0));
	BODY_40_59(52,C,D,E,T,A,B,X( 4),X( 6),X(12),X( 1));
	BODY_40_59(53,B,C,D,E,T,A,X( 5),X( 7),X(13),X( 2));
	BODY_40_59(54,A,B,C,D,E,T,X( 6),X( 8),X(14),X( 3));
	BODY_40_59(55,T,A,B,C,D,E,X( 7),X( 9),X(15),X( 4));
	BODY_40_59(56,E,T,A,B,C,D,X( 8),X(10),X( 0),X( 5));
	BODY_40_59(57,D,E,T,A,B,C,X( 9),X(11),X( 1),X( 6));
	BODY_40_59(58,C,D,E,T,A,B,X(10),X(12),X( 2),X( 7));
	BODY_40_59(59,B,C,D,E,T,A,X(11),X(13),X( 3),X( 8));

	BODY_60_79(60,A,B,C,D,E,T,X(12),X(14),X( 4),X( 9));
	BODY_60_79(61,T,A,B,C,D,E,X(13),X(15),X( 5),X(10));
	BODY_60_79(62,E,T,A,B,C,D,X(14),X( 0),X( 6),X(11));
	BODY_60_79(63,D,E,T,A,B,C,X(15),X( 1),X( 7),X(12));
	BODY_60_79(64,C,D,E,T,A,B,X( 0),X( 2),X( 8),X(13));
	BODY_60_79(65,B,C,D,E,T,A,X( 1),X( 3),X( 9),X(14));
	BODY_60_79(66,A,B,C,D,E,T,X( 2),X( 4),X(10),X(15));
	BODY_60_79(67,T,A,B,C,D,E,X( 3),X( 5),X(11),X( 0));
	BODY_60_79(68,E,T,A,B,C,D,X( 4),X( 6),X(12),X( 1));
	BODY_60_79(69,D,E,T,A,B,C,X( 5),X( 7),X(13),X( 2));
	BODY_60_79(70,C,D,E,T,A,B,X( 6),X( 8),X(14),X( 3));
	BODY_60_79(71,B,C,D,E,T,A,X( 7),X( 9),X(15),X( 4));
	BODY_60_79(72,A,B,C,D,E,T,X( 8),X(10),X( 0),X( 5));
	BODY_60_79(73,T,A,B,C,D,E,X( 9),X(11),X( 1),X( 6));
	BODY_60_79(74,E,T,A,B,C,D,X(10),X(12),X( 2),X( 7));
	BODY_60_79(75,D,E,T,A,B,C,X(11),X(13),X( 3),X( 8));
	BODY_60_79(76,C,D,E,T,A,B,X(12),X(14),X( 4),X( 9));
	BODY_60_79(77,B,C,D,E,T,A,X(13),X(15),X( 5),X(10));
	BODY_60_79(78,A,B,C,D,E,T,X(14),X( 0),X( 6),X(11));
	BODY_60_79(79,T,A,B,C,D,E,X(15),X( 1),X( 7),X(12));

	c->h0=(c->h0+E)&0xffffffffL;
	c->h1=(c->h1+T)&0xffffffffL;
	c->h2=(c->h2+A)&0xffffffffL;
	c->h3=(c->h3+B)&0xffffffffL;
	c->h4=(c->h4+C)&0xffffffffL;

	if (--num == 0) break;

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

		}
	}
#endif

#else	/* OPENSSL_SMALL_FOOTPRINT */

#define BODY_00_15(xi)		 do {	\
	T=E+K_00_19+F_00_19(B,C,D);	\
	E=D, D=C, C=ROTATE(B,30), B=A;	\
	A=ROTATE(A,5)+T+xi;	    } while(0)

#define BODY_16_19(xa,xb,xc,xd)	 do {	\
	Xupdate(T,xa,xa,xb,xc,xd);	\
	T+=E+K_00_19+F_00_19(B,C,D);	\
	E=D, D=C, C=ROTATE(B,30), B=A;	\
	A=ROTATE(A,5)+T;	    } while(0)

#define BODY_20_39(xa,xb,xc,xd)	 do {	\
	Xupdate(T,xa,xa,xb,xc,xd);	\
	T+=E+K_20_39+F_20_39(B,C,D);	\
	E=D, D=C, C=ROTATE(B,30), B=A;	\
	A=ROTATE(A,5)+T;	    } while(0)

#define BODY_40_59(xa,xb,xc,xd)	 do {	\
	Xupdate(T,xa,xa,xb,xc,xd);	\
	T+=E+K_40_59+F_40_59(B,C,D);	\
	E=D, D=C, C=ROTATE(B,30), B=A;	\
	A=ROTATE(A,5)+T;	    } while(0)

#define BODY_60_79(xa,xb,xc,xd)	 do {	\
	Xupdate(T,xa,xa,xb,xc,xd);	\
	T=E+K_60_79+F_60_79(B,C,D);	\
	E=D, D=C, C=ROTATE(B,30), B=A;	\
	A=ROTATE(A,5)+T+xa;	    } while(0)
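/*
 * In this small-footprint variant the eighty rounds are rolled back
 * into short loops and the working variables are renamed explicitly at
 * the end of every round (E=D, D=C, ...).  The message schedule lives
 * in a sixteen-word circular buffer: rounds 16-39 share one loop
 * counter (i runs 0..3 and then on to 23), and the 40-59 and 60-79
 * loops use identical index expressions with counters 0..19 and 4..23,
 * all reduced modulo 16.
 */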

#ifndef DONT_IMPLEMENT_BLOCK_HOST_ORDER
void HASH_BLOCK_HOST_ORDER (SHA_CTX *c, const void *d, size_t num)
	{
	const SHA_LONG *W=d;
	register unsigned MD32_REG_T A,B,C,D,E,T;
	int i;
	SHA_LONG	X[16];

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

	for (;;)
		{
	for (i=0;i<16;i++)
	{ X[i]=W[i]; BODY_00_15(X[i]); }
	for (i=0;i<4;i++)
	{ BODY_16_19(X[i],       X[i+2],      X[i+8],     X[(i+13)&15]); }
	for (;i<24;i++)
	{ BODY_20_39(X[i&15],    X[(i+2)&15], X[(i+8)&15],X[(i+13)&15]); }
	for (i=0;i<20;i++)
	{ BODY_40_59(X[(i+8)&15],X[(i+10)&15],X[i&15],    X[(i+5)&15]);  }
	for (i=4;i<24;i++)
	{ BODY_60_79(X[(i+8)&15],X[(i+10)&15],X[i&15],    X[(i+5)&15]);  }

	c->h0=(c->h0+A)&0xffffffffL;
	c->h1=(c->h1+B)&0xffffffffL;
	c->h2=(c->h2+C)&0xffffffffL;
	c->h3=(c->h3+D)&0xffffffffL;
	c->h4=(c->h4+E)&0xffffffffL;

	if (--num == 0) break;

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

	W+=SHA_LBLOCK;
		}
	}
#endif

#ifndef DONT_IMPLEMENT_BLOCK_DATA_ORDER
void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num)
	{
	const unsigned char *data=p;
	register unsigned MD32_REG_T A,B,C,D,E,T,l;
	int i;
	SHA_LONG	X[16];

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

	for (;;)
		{
	for (i=0;i<16;i++)
	{ HOST_c2l(data,l); X[i]=l; BODY_00_15(X[i]); }
	for (i=0;i<4;i++)
	{ BODY_16_19(X[i],       X[i+2],      X[i+8],     X[(i+13)&15]); }
	for (;i<24;i++)
	{ BODY_20_39(X[i&15],    X[(i+2)&15], X[(i+8)&15],X[(i+13)&15]); }
	for (i=0;i<20;i++)
	{ BODY_40_59(X[(i+8)&15],X[(i+10)&15],X[i&15],    X[(i+5)&15]);  }
	for (i=4;i<24;i++)
	{ BODY_60_79(X[(i+8)&15],X[(i+10)&15],X[i&15],    X[(i+5)&15]);  }

	c->h0=(c->h0+A)&0xffffffffL;
	c->h1=(c->h1+B)&0xffffffffL;
	c->h2=(c->h2+C)&0xffffffffL;
	c->h3=(c->h3+D)&0xffffffffL;
	c->h4=(c->h4+E)&0xffffffffL;

	if (--num == 0) break;

	A=c->h0;
	B=c->h1;
	C=c->h2;
	D=c->h3;
	E=c->h4;

		}
	}
#endif

#endif