1 /*	$NetBSD: arcfour.c,v 1.4 2019/12/15 22:50:47 christos Exp $	*/
2 
3 /*
4  * Copyright (c) 2003 - 2006 Kungliga Tekniska Högskolan
5  * (Royal Institute of Technology, Stockholm, Sweden).
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  *
12  * 1. Redistributions of source code must retain the above copyright
13  *    notice, this list of conditions and the following disclaimer.
14  *
15  * 2. Redistributions in binary form must reproduce the above copyright
16  *    notice, this list of conditions and the following disclaimer in the
17  *    documentation and/or other materials provided with the distribution.
18  *
19  * 3. Neither the name of the Institute nor the names of its contributors
20  *    may be used to endorse or promote products derived from this software
21  *    without specific prior written permission.
22  *
23  * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
24  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
25  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
26  * ARE DISCLAIMED.  IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
27  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
28  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
29  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
30  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
31  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
32  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
33  * SUCH DAMAGE.
34  */
35 
36 #include "gsskrb5_locl.h"
37 
38 /*
39  * Implements draft-brezak-win2k-krb-rc4-hmac-04.txt
40  *
41  * The arcfour messages have the following formats:
42  *
43  * MIC token
44  * 	TOK_ID[2] = 01 01
45  *	SGN_ALG[2] = 11 00
46  *	Filler[4]
47  *	SND_SEQ[8]
48  *	SGN_CKSUM[8]
49  *
50  * WRAP token
51  *	TOK_ID[2] = 02 01
52  *	SGN_ALG[2]
53  *	SEAL_ALG[2]
54  *	Filler[2]
55  *	SND_SEQ[8]
56  *	SGN_CKSUM[8]
57  *	Confounder[8]
58  */
59 
60 /*
61  * WRAP in DCE-style has a fixed-size header: the OID and length
62  * wrapping the WRAP header add up to
63  * GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE +
64  * GSS_ARCFOUR_WRAP_TOKEN_SIZE bytes (i.e. a total of 45 bytes of
65  * overhead; remember the 2 bytes from the APPL [0] SEQ).
66  */
67 
68 #define GSS_ARCFOUR_WRAP_TOKEN_SIZE 32
69 #define GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE 13
70 
71 
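/*
 * Derive the RC4 key (K6) used to seal the sequence number or the
 * message body.  First K5 = HMAC-MD5(key, T) is computed over the
 * 4-byte constant T (all zeros here); for the exportable 56-bit
 * enctype the "fortybits" salt is hashed instead and the result is
 * truncated to 7 bytes and padded with 0xAB.  K6 is then
 * HMAC-MD5(K5, cksum_data), where cksum_data is the 8-byte SGN_CKSUM
 * (or the 4-byte SND_SEQ) from the token, following
 * draft-brezak-win2k-krb-rc4-hmac.
 */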
72 static krb5_error_code
73 arcfour_mic_key(krb5_context context, krb5_keyblock *key,
74 		const void *cksum_data, size_t cksum_size,
75 		void *key6_data, size_t key6_size)
76 {
77     krb5_error_code ret;
78 
79     Checksum cksum_k5;
80     krb5_keyblock key5;
81     char k5_data[16];
82 
83     Checksum cksum_k6;
84 
85     char T[4];
86 
87     memset(T, 0, 4);
88     cksum_k5.checksum.data = k5_data;
89     cksum_k5.checksum.length = sizeof(k5_data);
90 
91     if (key->keytype == KRB5_ENCTYPE_ARCFOUR_HMAC_MD5_56) {
92 	char L40[14] = "fortybits";
93 
94 	memcpy(L40 + 10, T, sizeof(T));
95 	ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
96 			L40, 14, 0, key, &cksum_k5);
97 	memset(&k5_data[7], 0xAB, 9);
98     } else {
99 	ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
100 			T, 4, 0, key, &cksum_k5);
101     }
102     if (ret)
103 	return ret;
104 
105     key5.keytype = KRB5_ENCTYPE_ARCFOUR_HMAC_MD5;
106     key5.keyvalue = cksum_k5.checksum;
107 
108     cksum_k6.checksum.data = key6_data;
109     cksum_k6.checksum.length = key6_size;
110 
111     return krb5_hmac(context, CKSUMTYPE_RSA_MD5,
112 		     cksum_data, cksum_size, 0, &key5, &cksum_k6);
113 }
114 
115 
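/*
 * Compute the 8-byte SGN_CKSUM: the v1 and v2 prefixes (token header,
 * and message or confounder), all DATA and SIGN_ONLY iov buffers, and
 * an optional padding buffer are concatenated into one contiguous
 * allocation and checksummed under the given key and usage (HMAC-MD5
 * for arcfour keys); the first 8 bytes of the checksum are copied to
 * sgn_cksum.
 */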
116 static krb5_error_code
117 arcfour_mic_cksum_iov(krb5_context context,
118 		      krb5_keyblock *key, unsigned usage,
119 		      u_char *sgn_cksum, size_t sgn_cksum_sz,
120 		      const u_char *v1, size_t l1,
121 		      const void *v2, size_t l2,
122 		      const gss_iov_buffer_desc *iov,
123 		      int iov_count,
124 		      const gss_iov_buffer_desc *padding)
125 {
126     Checksum CKSUM;
127     u_char *ptr;
128     size_t len;
129     size_t ofs = 0;
130     int i;
131     krb5_crypto crypto;
132     krb5_error_code ret;
133 
134     assert(sgn_cksum_sz == 8);
135 
136     len = l1 + l2;
137 
138     for (i=0; i < iov_count; i++) {
139 	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
140 	case GSS_IOV_BUFFER_TYPE_DATA:
141 	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
142 	    break;
143 	default:
144 	    continue;
145 	}
146 
147 	len += iov[i].buffer.length;
148     }
149 
150     if (padding) {
151 	len += padding->buffer.length;
152     }
153 
154     ptr = malloc(len);
155     if (ptr == NULL)
156 	return ENOMEM;
157 
158     memcpy(ptr + ofs, v1, l1);
159     ofs += l1;
160     memcpy(ptr + ofs, v2, l2);
161     ofs += l2;
162 
163     for (i=0; i < iov_count; i++) {
164 	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
165 	case GSS_IOV_BUFFER_TYPE_DATA:
166 	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
167 	    break;
168 	default:
169 	    continue;
170 	}
171 
172 	memcpy(ptr + ofs,
173 	       iov[i].buffer.value,
174 	       iov[i].buffer.length);
175 	ofs += iov[i].buffer.length;
176     }
177 
178     if (padding) {
179 	memcpy(ptr + ofs,
180 	       padding->buffer.value,
181 	       padding->buffer.length);
182 	ofs += padding->buffer.length;
183     }
184 
185     ret = krb5_crypto_init(context, key, 0, &crypto);
186     if (ret) {
187 	free(ptr);
188 	return ret;
189     }
190 
191     ret = krb5_create_checksum(context,
192 			       crypto,
193 			       usage,
194 			       0,
195 			       ptr, len,
196 			       &CKSUM);
197     memset(ptr, 0, len);
198     free(ptr);
199     if (ret == 0) {
200 	memcpy(sgn_cksum, CKSUM.checksum.data, sgn_cksum_sz);
201 	free_Checksum(&CKSUM);
202     }
203     krb5_crypto_destroy(context, crypto);
204 
205     return ret;
206 }
207 
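/*
 * Non-iov wrapper around arcfour_mic_cksum_iov(): v3 is passed as a
 * single SIGN_ONLY buffer.
 */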
208 static krb5_error_code
209 arcfour_mic_cksum(krb5_context context,
210 		  krb5_keyblock *key, unsigned usage,
211 		  u_char *sgn_cksum, size_t sgn_cksum_sz,
212 		  const u_char *v1, size_t l1,
213 		  const void *v2, size_t l2,
214 		  const void *v3, size_t l3)
215 {
216     gss_iov_buffer_desc iov;
217 
218     iov.type = GSS_IOV_BUFFER_TYPE_SIGN_ONLY;
219     iov.buffer.value = rk_UNCONST(v3);
220     iov.buffer.length = l3;
221 
222     return arcfour_mic_cksum_iov(context, key, usage,
223 				 sgn_cksum, sgn_cksum_sz,
224 				 v1, l1, v2, l2,
225 				 &iov, 1, NULL);
226 }
227 
228 
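/*
 * Create a MIC token: write the TOK_ID/SGN_ALG/Filler header, compute
 * SGN_CKSUM over header + message, derive K6 from the checksum and
 * RC4-encrypt SND_SEQ (the 32-bit sequence number followed by four
 * direction bytes: 00 when the context was locally initiated, ff
 * otherwise).
 */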
229 OM_uint32
230 _gssapi_get_mic_arcfour(OM_uint32 * minor_status,
231 			const gsskrb5_ctx context_handle,
232 			krb5_context context,
233 			gss_qop_t qop_req,
234 			const gss_buffer_t message_buffer,
235 			gss_buffer_t message_token,
236 			krb5_keyblock *key)
237 {
238     krb5_error_code ret;
239     int32_t seq_number;
240     size_t len, total_len;
241     u_char k6_data[16], *p0, *p;
242     EVP_CIPHER_CTX *rc4_key;
243 
244     _gsskrb5_encap_length (22, &len, &total_len, GSS_KRB5_MECHANISM);
245 
246     message_token->length = total_len;
247     message_token->value  = malloc (total_len);
248     if (message_token->value == NULL) {
249 	*minor_status = ENOMEM;
250 	return GSS_S_FAILURE;
251     }
252 
253     p0 = _gssapi_make_mech_header(message_token->value,
254 				  len,
255 				  GSS_KRB5_MECHANISM);
256     p = p0;
257 
258     *p++ = 0x01; /* TOK_ID */
259     *p++ = 0x01;
260     *p++ = 0x11; /* SGN_ALG */
261     *p++ = 0x00;
262     *p++ = 0xff; /* Filler */
263     *p++ = 0xff;
264     *p++ = 0xff;
265     *p++ = 0xff;
266 
267     p = NULL;
268 
269     ret = arcfour_mic_cksum(context,
270 			    key, KRB5_KU_USAGE_SIGN,
271 			    p0 + 16, 8,  /* SGN_CKSUM */
272 			    p0, 8, /* TOK_ID, SGN_ALG, Filler */
273 			    message_buffer->value, message_buffer->length,
274 			    NULL, 0);
275     if (ret) {
276 	_gsskrb5_release_buffer(minor_status, message_token);
277 	*minor_status = ret;
278 	return GSS_S_FAILURE;
279     }
280 
281     ret = arcfour_mic_key(context, key,
282 			  p0 + 16, 8, /* SGN_CKSUM */
283 			  k6_data, sizeof(k6_data));
284     if (ret) {
285 	_gsskrb5_release_buffer(minor_status, message_token);
286 	*minor_status = ret;
287 	return GSS_S_FAILURE;
288     }
289 
290     HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
291     krb5_auth_con_getlocalseqnumber (context,
292 				     context_handle->auth_context,
293 				     &seq_number);
294     p = p0 + 8; /* SND_SEQ */
295     _gsskrb5_encode_be_om_uint32(seq_number, p);
296 
297     krb5_auth_con_setlocalseqnumber (context,
298 				     context_handle->auth_context,
299 				     ++seq_number);
300     HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
301 
302     memset (p + 4, (context_handle->more_flags & LOCAL) ? 0 : 0xff, 4);
303 
304 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
305     EVP_CIPHER_CTX rc4_keys;
306     rc4_key = &rc4_keys;
307     EVP_CIPHER_CTX_init(rc4_key);
308 #else
309     rc4_key = EVP_CIPHER_CTX_new();
310 #endif
311     EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
312     EVP_Cipher(rc4_key, p, p, 8);
313 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
314     EVP_CIPHER_CTX_cleanup(rc4_key);
315 #else
316     EVP_CIPHER_CTX_free(rc4_key);
317 #endif
318 
319     memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
320 
321     *minor_status = 0;
322     return GSS_S_COMPLETE;
323 }
324 
325 
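/*
 * Verify a MIC token: check the token header, recompute SGN_CKSUM over
 * header + message and compare it in constant time against the token,
 * then derive K6 from the checksum, decrypt SND_SEQ with RC4(K6),
 * verify the direction bytes and finally check the sequence-number
 * ordering.
 */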
326 OM_uint32
327 _gssapi_verify_mic_arcfour(OM_uint32 * minor_status,
328 			   const gsskrb5_ctx context_handle,
329 			   krb5_context context,
330 			   const gss_buffer_t message_buffer,
331 			   const gss_buffer_t token_buffer,
332 			   gss_qop_t * qop_state,
333 			   krb5_keyblock *key,
334 			   const char *type)
335 {
336     krb5_error_code ret;
337     uint32_t seq_number;
338     OM_uint32 omret;
339     u_char SND_SEQ[8], cksum_data[8], *p;
340     char k6_data[16];
341     int cmp;
342 
343     if (qop_state)
344 	*qop_state = 0;
345 
346     p = token_buffer->value;
347     omret = _gsskrb5_verify_header (&p,
348 				       token_buffer->length,
349 				       type,
350 				       GSS_KRB5_MECHANISM);
351     if (omret)
352 	return omret;
353 
354     if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
355 	return GSS_S_BAD_SIG;
356     p += 2;
357     if (memcmp (p, "\xff\xff\xff\xff", 4) != 0)
358 	return GSS_S_BAD_MIC;
359     p += 4;
360 
361     ret = arcfour_mic_cksum(context,
362 			    key, KRB5_KU_USAGE_SIGN,
363 			    cksum_data, sizeof(cksum_data),
364 			    p - 8, 8,
365 			    message_buffer->value, message_buffer->length,
366 			    NULL, 0);
367     if (ret) {
368 	*minor_status = ret;
369 	return GSS_S_FAILURE;
370     }
371 
372     ret = arcfour_mic_key(context, key,
373 			  cksum_data, sizeof(cksum_data),
374 			  k6_data, sizeof(k6_data));
375     if (ret) {
376 	*minor_status = ret;
377 	return GSS_S_FAILURE;
378     }
379 
380     cmp = ct_memcmp(cksum_data, p + 8, 8);
381     if (cmp) {
382 	*minor_status = 0;
383 	return GSS_S_BAD_MIC;
384     }
385 
386     {
387 	EVP_CIPHER_CTX *rc4_key;
388 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
389 	EVP_CIPHER_CTX rc4_keys;
390 	rc4_key = &rc4_keys;
391 	EVP_CIPHER_CTX_init(rc4_key);
392 #else
393 	rc4_key = EVP_CIPHER_CTX_new();
394 #endif
395 
396 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, (void *)k6_data, NULL, 0);
397 	EVP_Cipher(rc4_key, SND_SEQ, p, 8);
398 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
399 	EVP_CIPHER_CTX_cleanup(rc4_key);
400 #else
401 	EVP_CIPHER_CTX_free(rc4_key);
402 #endif
403 
404 	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
405     }
406 
407     _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);
408 
409     if (context_handle->more_flags & LOCAL)
410 	cmp = memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4);
411     else
412 	cmp = memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4);
413 
414     memset_s(SND_SEQ, sizeof(SND_SEQ), 0, sizeof(SND_SEQ));
415     if (cmp != 0) {
416 	*minor_status = 0;
417 	return GSS_S_BAD_MIC;
418     }
419 
420     HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
421     omret = _gssapi_msg_order_check(context_handle->order, seq_number);
422     HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
423     if (omret)
424 	return omret;
425 
426     *minor_status = 0;
427     return GSS_S_COMPLETE;
428 }
429 
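/*
 * Wrap a message: build the 32-byte WRAP token header, copy in the
 * message (plus one byte of padding unless DCE style), checksum
 * header + confounder + data into SGN_CKSUM, then, if confidentiality
 * was requested, RC4-encrypt confounder + data with a key derived from
 * Klocal (session key XOR 0xF0) and the sequence number.  Finally
 * SND_SEQ is encrypted with a key derived from SGN_CKSUM.
 */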
430 OM_uint32
431 _gssapi_wrap_arcfour(OM_uint32 * minor_status,
432 		     const gsskrb5_ctx context_handle,
433 		     krb5_context context,
434 		     int conf_req_flag,
435 		     gss_qop_t qop_req,
436 		     const gss_buffer_t input_message_buffer,
437 		     int * conf_state,
438 		     gss_buffer_t output_message_buffer,
439 		     krb5_keyblock *key)
440 {
441     u_char Klocaldata[16], k6_data[16], *p, *p0;
442     size_t len, total_len, datalen;
443     krb5_keyblock Klocal;
444     krb5_error_code ret;
445     int32_t seq_number;
446 
447     if (conf_state)
448 	*conf_state = 0;
449 
450     datalen = input_message_buffer->length;
451 
452     if (IS_DCE_STYLE(context_handle)) {
453 	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
454 	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
455 	total_len += datalen;
456     } else {
457 	datalen += 1; /* padding */
458 	len = datalen + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
459 	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
460     }
461 
462     output_message_buffer->length = total_len;
463     output_message_buffer->value  = malloc (total_len);
464     if (output_message_buffer->value == NULL) {
465 	*minor_status = ENOMEM;
466 	return GSS_S_FAILURE;
467     }
468 
469     p0 = _gssapi_make_mech_header(output_message_buffer->value,
470 				  len,
471 				  GSS_KRB5_MECHANISM);
472     p = p0;
473 
474     *p++ = 0x02; /* TOK_ID */
475     *p++ = 0x01;
476     *p++ = 0x11; /* SGN_ALG */
477     *p++ = 0x00;
478     if (conf_req_flag) {
479 	*p++ = 0x10; /* SEAL_ALG */
480 	*p++ = 0x00;
481     } else {
482 	*p++ = 0xff; /* SEAL_ALG */
483 	*p++ = 0xff;
484     }
485     *p++ = 0xff; /* Filler */
486     *p++ = 0xff;
487 
488     p = NULL;
489 
490     HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
491     krb5_auth_con_getlocalseqnumber (context,
492 				     context_handle->auth_context,
493 				     &seq_number);
494 
495     _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);
496 
497     krb5_auth_con_setlocalseqnumber (context,
498 				     context_handle->auth_context,
499 				     ++seq_number);
500     HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
501 
502     memset (p0 + 8 + 4,
503 	    (context_handle->more_flags & LOCAL) ? 0 : 0xff,
504 	    4);
505 
506     krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */
507 
508     /* p points to data */
509     p = p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
510     memcpy(p, input_message_buffer->value, input_message_buffer->length);
511 
512     if (!IS_DCE_STYLE(context_handle))
513 	p[input_message_buffer->length] = 1; /* padding */
514 
515     ret = arcfour_mic_cksum(context,
516 			    key, KRB5_KU_USAGE_SEAL,
517 			    p0 + 16, 8, /* SGN_CKSUM */
518 			    p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
519 			    p0 + 24, 8, /* Confounder */
520 			    p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
521 			    datalen);
522     if (ret) {
523 	*minor_status = ret;
524 	_gsskrb5_release_buffer(minor_status, output_message_buffer);
525 	return GSS_S_FAILURE;
526     }
527 
528     {
529 	int i;
530 
531 	Klocal.keytype = key->keytype;
532 	Klocal.keyvalue.data = Klocaldata;
533 	Klocal.keyvalue.length = sizeof(Klocaldata);
534 
535 	for (i = 0; i < 16; i++)
536 	    Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
537     }
538     ret = arcfour_mic_key(context, &Klocal,
539 			  p0 + 8, 4, /* SND_SEQ */
540 			  k6_data, sizeof(k6_data));
541     memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
542     if (ret) {
543 	_gsskrb5_release_buffer(minor_status, output_message_buffer);
544 	*minor_status = ret;
545 	return GSS_S_FAILURE;
546     }
547 
548 
549     if(conf_req_flag) {
550 	EVP_CIPHER_CTX *rc4_key;
551 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
552 	EVP_CIPHER_CTX rc4_keys;
553 	rc4_key = &rc4_keys;
554 	EVP_CIPHER_CTX_init(rc4_key);
555 #else
556 	rc4_key = EVP_CIPHER_CTX_new();
557 #endif
558 
560 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
561 	EVP_Cipher(rc4_key, p0 + 24, p0 + 24, 8 + datalen);
562 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
563 	EVP_CIPHER_CTX_cleanup(rc4_key);
564 #else
565 	EVP_CIPHER_CTX_free(rc4_key);
566 #endif
567     }
568     memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
569 
570     ret = arcfour_mic_key(context, key,
571 			  p0 + 16, 8, /* SGN_CKSUM */
572 			  k6_data, sizeof(k6_data));
573     if (ret) {
574 	_gsskrb5_release_buffer(minor_status, output_message_buffer);
575 	*minor_status = ret;
576 	return GSS_S_FAILURE;
577     }
578 
579     {
580 	EVP_CIPHER_CTX *rc4_key;
581 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
582 	EVP_CIPHER_CTX rc4_keys;
583 	rc4_key = &rc4_keys;
584 	EVP_CIPHER_CTX_init(rc4_key);
585 #else
586 	rc4_key = EVP_CIPHER_CTX_new();
587 #endif
588 
589 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
590 	EVP_Cipher(rc4_key, p0 + 8, p0 + 8 /* SND_SEQ */, 8);
591 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
592 	EVP_CIPHER_CTX_cleanup(rc4_key);
593 #else
594 	EVP_CIPHER_CTX_free(rc4_key);
595 #endif
596 	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
597     }
598 
599     if (conf_state)
600 	*conf_state = conf_req_flag;
601 
602     *minor_status = 0;
603     return GSS_S_COMPLETE;
604 }
605 
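/*
 * Unwrap a message: validate the WRAP token header, decrypt SND_SEQ
 * with a key derived from SGN_CKSUM and check its direction bytes,
 * then decrypt confounder + data with a key derived from Klocal
 * (session key XOR 0xF0) and the sequence number, strip the padding
 * (non-DCE only), verify SGN_CKSUM in constant time and enforce
 * message ordering.
 */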
606 OM_uint32 _gssapi_unwrap_arcfour(OM_uint32 *minor_status,
607 				 const gsskrb5_ctx context_handle,
608 				 krb5_context context,
609 				 const gss_buffer_t input_message_buffer,
610 				 gss_buffer_t output_message_buffer,
611 				 int *conf_state,
612 				 gss_qop_t *qop_state,
613 				 krb5_keyblock *key)
614 {
615     u_char Klocaldata[16];
616     krb5_keyblock Klocal;
617     krb5_error_code ret;
618     uint32_t seq_number;
619     size_t datalen;
620     OM_uint32 omret;
621     u_char k6_data[16], SND_SEQ[8], Confounder[8];
622     u_char cksum_data[8];
623     u_char *p, *p0;
624     int cmp;
625     int conf_flag;
626     size_t padlen = 0, len;
627 
628     if (conf_state)
629 	*conf_state = 0;
630     if (qop_state)
631 	*qop_state = 0;
632 
633     p0 = input_message_buffer->value;
634 
635     if (IS_DCE_STYLE(context_handle)) {
636 	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE +
637 	    GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE;
638 	if (input_message_buffer->length < len)
639 	    return GSS_S_BAD_MECH;
640     } else {
641 	len = input_message_buffer->length;
642     }
643 
644     omret = _gssapi_verify_mech_header(&p0,
645 				       len,
646 				       GSS_KRB5_MECHANISM);
647     if (omret)
648 	return omret;
649 
650     /* length of mech header */
651     len = (p0 - (u_char *)input_message_buffer->value) +
652 	GSS_ARCFOUR_WRAP_TOKEN_SIZE;
653 
654     if (len > input_message_buffer->length)
655 	return GSS_S_BAD_MECH;
656 
657     /* length of data */
658     datalen = input_message_buffer->length - len;
659 
660     p = p0;
661 
662     if (memcmp(p, "\x02\x01", 2) != 0)
663 	return GSS_S_BAD_SIG;
664     p += 2;
665     if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
666 	return GSS_S_BAD_SIG;
667     p += 2;
668 
669     if (memcmp (p, "\x10\x00", 2) == 0)
670 	conf_flag = 1;
671     else if (memcmp (p, "\xff\xff", 2) == 0)
672 	conf_flag = 0;
673     else
674 	return GSS_S_BAD_SIG;
675 
676     p += 2;
677     if (memcmp (p, "\xff\xff", 2) != 0)
678 	return GSS_S_BAD_MIC;
679     p = NULL;
680 
681     ret = arcfour_mic_key(context, key,
682 			  p0 + 16, 8, /* SGN_CKSUM */
683 			  k6_data, sizeof(k6_data));
684     if (ret) {
685 	*minor_status = ret;
686 	return GSS_S_FAILURE;
687     }
688 
689     {
690 	EVP_CIPHER_CTX *rc4_key;
691 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
692 	EVP_CIPHER_CTX rc4_keys;
693 	rc4_key = &rc4_keys;
694 	EVP_CIPHER_CTX_init(rc4_key);
695 #else
696 	rc4_key = EVP_CIPHER_CTX_new();
697 #endif
698 
699 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
700 	EVP_Cipher(rc4_key, SND_SEQ, p0 + 8, 8);
701 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
702 	EVP_CIPHER_CTX_cleanup(rc4_key);
703 #else
704 	EVP_CIPHER_CTX_free(rc4_key);
705 #endif
706 	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
707     }
708 
709     _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);
710 
711     if (context_handle->more_flags & LOCAL)
712 	cmp = memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4);
713     else
714 	cmp = memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4);
715 
716     if (cmp != 0) {
717 	*minor_status = 0;
718 	return GSS_S_BAD_MIC;
719     }
720 
721     {
722 	int i;
723 
724 	Klocal.keytype = key->keytype;
725 	Klocal.keyvalue.data = Klocaldata;
726 	Klocal.keyvalue.length = sizeof(Klocaldata);
727 
728 	for (i = 0; i < 16; i++)
729 	    Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
730     }
731     ret = arcfour_mic_key(context, &Klocal,
732 			  SND_SEQ, 4,
733 			  k6_data, sizeof(k6_data));
734     memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
735     if (ret) {
736 	*minor_status = ret;
737 	return GSS_S_FAILURE;
738     }
739 
740     output_message_buffer->value = malloc(datalen);
741     if (output_message_buffer->value == NULL) {
742 	*minor_status = ENOMEM;
743 	return GSS_S_FAILURE;
744     }
745     output_message_buffer->length = datalen;
746 
747     if(conf_flag) {
748 	EVP_CIPHER_CTX *rc4_key;
749 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
750 	EVP_CIPHER_CTX rc4_keys;
751 	rc4_key = &rc4_keys;
752 	EVP_CIPHER_CTX_init(rc4_key);
753 #else
754 	rc4_key = EVP_CIPHER_CTX_new();
755 #endif
756 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
757 	EVP_Cipher(rc4_key, Confounder, p0 + 24, 8);
758 	EVP_Cipher(rc4_key, output_message_buffer->value, p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE, datalen);
759 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
760 	EVP_CIPHER_CTX_cleanup(rc4_key);
761 #else
762 	EVP_CIPHER_CTX_free(rc4_key);
763 #endif
764     } else {
765 	memcpy(Confounder, p0 + 24, 8); /* Confounder */
766 	memcpy(output_message_buffer->value,
767 	       p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
768 	       datalen);
769     }
770     memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
771 
772     if (!IS_DCE_STYLE(context_handle)) {
773 	ret = _gssapi_verify_pad(output_message_buffer, datalen, &padlen);
774 	if (ret) {
775 	    _gsskrb5_release_buffer(minor_status, output_message_buffer);
776 	    *minor_status = 0;
777 	    return ret;
778 	}
779 	output_message_buffer->length -= padlen;
780     }
781 
782     ret = arcfour_mic_cksum(context,
783 			    key, KRB5_KU_USAGE_SEAL,
784 			    cksum_data, sizeof(cksum_data),
785 			    p0, 8,
786 			    Confounder, sizeof(Confounder),
787 			    output_message_buffer->value,
788 			    output_message_buffer->length + padlen);
789     if (ret) {
790 	_gsskrb5_release_buffer(minor_status, output_message_buffer);
791 	*minor_status = ret;
792 	return GSS_S_FAILURE;
793     }
794 
795     cmp = ct_memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM */
796     if (cmp) {
797 	_gsskrb5_release_buffer(minor_status, output_message_buffer);
798 	*minor_status = 0;
799 	return GSS_S_BAD_MIC;
800     }
801 
802     HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
803     omret = _gssapi_msg_order_check(context_handle->order, seq_number);
804     HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
805     if (omret)
806 	return omret;
807 
808     if (conf_state)
809 	*conf_state = conf_flag;
810 
811     *minor_status = 0;
812     return GSS_S_COMPLETE;
813 }
814 
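/*
 * Compute how much cleartext fits in req_output_size bytes of wrapped
 * output: subtract the (fixed) token and mech-header overhead and, for
 * non-DCE contexts, round the result down to the 8-byte blocksize.
 */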
815 static OM_uint32
816 max_wrap_length_arcfour(const gsskrb5_ctx ctx,
817 			krb5_crypto crypto,
818 			size_t input_length,
819 			OM_uint32 *max_input_size)
820 {
821     /*
822      * if GSS_C_DCE_STYLE is in use:
823      *  - we only need to encapsulate the WRAP token
824      * However, since this is a fixed size, we just subtract it from the requested output size.
825      */
826     if (IS_DCE_STYLE(ctx)) {
827 	size_t len, total_len;
828 
829 	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
830 	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
831 
832 	if (input_length < len)
833 	    *max_input_size = 0;
834 	else
835 	    *max_input_size = input_length - len;
836 
837     } else {
838 	size_t extrasize = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
839 	size_t blocksize = 8;
840 	size_t len, total_len;
841 
842 	len = 8 + input_length + blocksize + extrasize;
843 
844 	_gsskrb5_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
845 
846 	total_len -= input_length; /* token length */
847 	if (total_len < input_length) {
848 	    *max_input_size = (input_length - total_len);
849 	    (*max_input_size) &= (~(OM_uint32)(blocksize - 1));
850 	} else {
851 	    *max_input_size = 0;
852 	}
853     }
854 
855     return GSS_S_COMPLETE;
856 }
857 
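/*
 * Set up a krb5_crypto handle for the key and let
 * max_wrap_length_arcfour() do the actual wrap-size calculation.
 */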
858 OM_uint32
859 _gssapi_wrap_size_arcfour(OM_uint32 *minor_status,
860 			  const gsskrb5_ctx ctx,
861 			  krb5_context context,
862 			  int conf_req_flag,
863 			  gss_qop_t qop_req,
864 			  OM_uint32 req_output_size,
865 			  OM_uint32 *max_input_size,
866 			  krb5_keyblock *key)
867 {
868     krb5_error_code ret;
869     krb5_crypto crypto;
870 
871     ret = krb5_crypto_init(context, key, 0, &crypto);
872     if (ret != 0) {
873 	*minor_status = ret;
874 	return GSS_S_FAILURE;
875     }
876 
877     ret = max_wrap_length_arcfour(ctx, crypto,
878 				  req_output_size, max_input_size);
879     if (ret != 0) {
880 	*minor_status = ret;
881 	krb5_crypto_destroy(context, crypto);
882 	return GSS_S_FAILURE;
883     }
884 
885     krb5_crypto_destroy(context, crypto);
886 
887     return GSS_S_COMPLETE;
888 }
889 
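/*
 * IOV variant of the wrap-length calculation: count the DATA buffers
 * (plus one byte of padding if a PADDING buffer is supplied) and report
 * the required HEADER length (mech header + 32-byte WRAP token), a zero
 * TRAILER and a 1-byte PADDING.
 */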
890 OM_uint32
891 _gssapi_wrap_iov_length_arcfour(OM_uint32 *minor_status,
892 				gsskrb5_ctx ctx,
893 				krb5_context context,
894 				int conf_req_flag,
895 				gss_qop_t qop_req,
896 				int *conf_state,
897 				gss_iov_buffer_desc *iov,
898 				int iov_count)
899 {
900     OM_uint32 major_status;
901     size_t data_len = 0;
902     int i;
903     gss_iov_buffer_desc *header = NULL;
904     gss_iov_buffer_desc *padding = NULL;
905     gss_iov_buffer_desc *trailer = NULL;
906 
907     *minor_status = 0;
908 
909     for (i = 0; i < iov_count; i++) {
910 	switch(GSS_IOV_BUFFER_TYPE(iov[i].type)) {
911 	case GSS_IOV_BUFFER_TYPE_EMPTY:
912 	    break;
913 	case GSS_IOV_BUFFER_TYPE_DATA:
914 	    data_len += iov[i].buffer.length;
915 	    break;
916 	case GSS_IOV_BUFFER_TYPE_HEADER:
917 	    if (header != NULL) {
918 		*minor_status = EINVAL;
919 		return GSS_S_FAILURE;
920 	    }
921 	    header = &iov[i];
922 	    break;
923 	case GSS_IOV_BUFFER_TYPE_TRAILER:
924 	    if (trailer != NULL) {
925 		*minor_status = EINVAL;
926 		return GSS_S_FAILURE;
927 	    }
928 	    trailer = &iov[i];
929 	    break;
930 	case GSS_IOV_BUFFER_TYPE_PADDING:
931 	    if (padding != NULL) {
932 		*minor_status = EINVAL;
933 		return GSS_S_FAILURE;
934 	    }
935 	    padding = &iov[i];
936 	    break;
937 	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
938 	    break;
939 	default:
940 	    *minor_status = EINVAL;
941 	    return GSS_S_FAILURE;
942 	}
943     }
944 
945     major_status = _gk_verify_buffers(minor_status, ctx, header, padding, trailer);
946     if (major_status != GSS_S_COMPLETE) {
947 	    return major_status;
948     }
949 
950     if (IS_DCE_STYLE(ctx)) {
951 	size_t len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
952 	size_t total_len;
953 	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
954 	header->buffer.length = total_len;
955     } else {
956 	size_t len;
957 	size_t total_len;
958 	if (padding) {
959 	    data_len += 1; /* padding */
960 	}
961 	len = data_len + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
962 	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
963 	header->buffer.length = total_len - data_len;
964     }
965 
966     if (trailer) {
967 	trailer->buffer.length = 0;
968     }
969 
970     if (padding) {
971 	padding->buffer.length = 1;
972     }
973 
974     return GSS_S_COMPLETE;
975 }
976 
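/*
 * IOV variant of _gssapi_wrap_arcfour(): the token header goes into the
 * HEADER buffer and the payload is sealed in place in the DATA buffers
 * (and the optional 1-byte PADDING buffer), following the same checksum
 * and key-derivation steps as the non-iov path.
 */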
977 OM_uint32
978 _gssapi_wrap_iov_arcfour(OM_uint32 *minor_status,
979 			 gsskrb5_ctx ctx,
980 			 krb5_context context,
981 			 int conf_req_flag,
982 			 int *conf_state,
983 			 gss_iov_buffer_desc *iov,
984 			 int iov_count,
985 			 krb5_keyblock *key)
986 {
987     OM_uint32 major_status, junk;
988     gss_iov_buffer_desc *header, *padding, *trailer;
989     krb5_error_code kret;
990     int32_t seq_number;
991     u_char Klocaldata[16], k6_data[16], *p, *p0;
992     size_t make_len = 0;
993     size_t header_len = 0;
994     size_t data_len = 0;
995     krb5_keyblock Klocal;
996     int i;
997 
998     header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
999     padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
1000     trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);
1001 
1002     major_status = _gk_verify_buffers(minor_status, ctx, header, padding, trailer);
1003     if (major_status != GSS_S_COMPLETE) {
1004 	return major_status;
1005     }
1006 
1007     for (i = 0; i < iov_count; i++) {
1008 	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
1009 	case GSS_IOV_BUFFER_TYPE_DATA:
1010 	    break;
1011 	default:
1012 	    continue;
1013 	}
1014 
1015 	data_len += iov[i].buffer.length;
1016     }
1017 
1018     if (padding) {
1019 	data_len += 1;
1020     }
1021 
1022     if (IS_DCE_STYLE(ctx)) {
1023 	size_t unwrapped_len;
1024 	unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
1025 	_gssapi_encap_length(unwrapped_len,
1026 			     &make_len,
1027 			     &header_len,
1028 			     GSS_KRB5_MECHANISM);
1029     } else {
1030 	size_t unwrapped_len;
1031 	unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE + data_len;
1032 	_gssapi_encap_length(unwrapped_len,
1033 			     &make_len,
1034 			     &header_len,
1035 			     GSS_KRB5_MECHANISM);
1036 	header_len -= data_len;
1037     }
1038 
1039     if (GSS_IOV_BUFFER_FLAGS(header->type) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE) {
1040 	major_status = _gk_allocate_buffer(minor_status, header,
1041 					   header_len);
1042 	if (major_status != GSS_S_COMPLETE)
1043 	    goto failure;
1044     } else if (header->buffer.length < header_len) {
1045 	*minor_status = KRB5_BAD_MSIZE;
1046 	major_status = GSS_S_FAILURE;
1047 	goto failure;
1048     } else {
1049 	header->buffer.length = header_len;
1050     }
1051 
1052     if (padding) {
1053 	if (GSS_IOV_BUFFER_FLAGS(padding->type) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE) {
1054 	    major_status = _gk_allocate_buffer(minor_status, padding, 1);
1055 	    if (major_status != GSS_S_COMPLETE)
1056 		goto failure;
1057 	} else if (padding->buffer.length < 1) {
1058 	    *minor_status = KRB5_BAD_MSIZE;
1059 	    major_status = GSS_S_FAILURE;
1060 	    goto failure;
1061 	} else {
1062 	    padding->buffer.length = 1;
1063 	}
1064 	memset(padding->buffer.value, 1, 1);
1065     }
1066 
1067     if (trailer) {
1068 	trailer->buffer.length = 0;
1069 	trailer->buffer.value = NULL;
1070     }
1071 
1072     p0 = _gssapi_make_mech_header(header->buffer.value,
1073 				  make_len,
1074 				  GSS_KRB5_MECHANISM);
1075     p = p0;
1076 
1077     *p++ = 0x02; /* TOK_ID */
1078     *p++ = 0x01;
1079     *p++ = 0x11; /* SGN_ALG */
1080     *p++ = 0x00;
1081     if (conf_req_flag) {
1082 	*p++ = 0x10; /* SEAL_ALG */
1083 	*p++ = 0x00;
1084     } else {
1085 	*p++ = 0xff; /* SEAL_ALG */
1086 	*p++ = 0xff;
1087     }
1088     *p++ = 0xff; /* Filler */
1089     *p++ = 0xff;
1090 
1091     p = NULL;
1092 
1093     HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
1094     krb5_auth_con_getlocalseqnumber(context,
1095 				    ctx->auth_context,
1096 				    &seq_number);
1097     _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);
1098 
1099     krb5_auth_con_setlocalseqnumber(context,
1100 				    ctx->auth_context,
1101 				    ++seq_number);
1102     HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);
1103 
1104     memset(p0 + 8 + 4,
1105            (ctx->more_flags & LOCAL) ? 0 : 0xff,
1106            4);
1107 
1108     krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */
1109 
1110     /* Sign Data */
1111     kret = arcfour_mic_cksum_iov(context,
1112 				 key, KRB5_KU_USAGE_SEAL,
1113 				 p0 + 16, 8, /* SGN_CKSUM */
1114 				 p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
1115 				 p0 + 24, 8, /* Confounder */
1116 				 iov, iov_count, /* Data + SignOnly */
1117 				 padding); /* padding */
1118     if (kret) {
1119 	*minor_status = kret;
1120 	major_status = GSS_S_FAILURE;
1121 	goto failure;
1122     }
1123 
1124     Klocal.keytype = key->keytype;
1125     Klocal.keyvalue.data = Klocaldata;
1126     Klocal.keyvalue.length = sizeof(Klocaldata);
1127 
1128     for (i = 0; i < 16; i++) {
1129 	Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
1130     }
1131     kret = arcfour_mic_key(context, &Klocal,
1132 			   p0 + 8, 4, /* SND_SEQ */
1133 			   k6_data, sizeof(k6_data));
1134     memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
1135     if (kret) {
1136 	*minor_status = kret;
1137 	major_status = GSS_S_FAILURE;
1138 	goto failure;
1139     }
1140 
1141     if (conf_req_flag) {
1142 	EVP_CIPHER_CTX *rc4_key;
1143 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1144 	EVP_CIPHER_CTX rc4_keys;
1145 	rc4_key = &rc4_keys;
1146 	EVP_CIPHER_CTX_init(rc4_key);
1147 #else
1148 	rc4_key = EVP_CIPHER_CTX_new();
1149 #endif
1150 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1151 
1152 	/* Confounder */
1153 	EVP_Cipher(rc4_key, p0 + 24, p0 + 24, 8);
1154 
1155 	/* Seal Data */
1156 	for (i=0; i < iov_count; i++) {
1157 	    switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
1158 	    case GSS_IOV_BUFFER_TYPE_DATA:
1159 		break;
1160 	    default:
1161 		continue;
1162 	    }
1163 
1164 	    EVP_Cipher(rc4_key, iov[i].buffer.value,
1165 		       iov[i].buffer.value, iov[i].buffer.length);
1166 	}
1167 
1168 	/* Padding */
1169 	if (padding) {
1170 	    EVP_Cipher(rc4_key, padding->buffer.value,
1171 		       padding->buffer.value, padding->buffer.length);
1172 	}
1173 
1174 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1175 	EVP_CIPHER_CTX_cleanup(rc4_key);
1176 #else
1177 	EVP_CIPHER_CTX_free(rc4_key);
1178 #endif
1179     }
1180     memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
1181 
1182     kret = arcfour_mic_key(context, key,
1183 			   p0 + 16, 8, /* SGN_CKSUM */
1184 			   k6_data, sizeof(k6_data));
1185     if (kret) {
1186 	*minor_status = kret;
1187 	major_status = GSS_S_FAILURE;
1188         return major_status;
1189     }
1190 
1191     {
1192 	EVP_CIPHER_CTX *rc4_key;
1193 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1194 	EVP_CIPHER_CTX rc4_keys;
1195 	rc4_key = &rc4_keys;
1196 	EVP_CIPHER_CTX_init(rc4_key);
1197 #else
1198 	rc4_key = EVP_CIPHER_CTX_new();
1199 #endif
1200 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1201 	EVP_Cipher(rc4_key, p0 + 8, p0 + 8, 8); /* SND_SEQ */
1202 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1203 	EVP_CIPHER_CTX_cleanup(rc4_key);
1204 #else
1205 	EVP_CIPHER_CTX_free(rc4_key);
1206 #endif
1207 
1208 	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
1209     }
1210 
1211     if (conf_state)
1212 	*conf_state = conf_req_flag;
1213 
1214     *minor_status = 0;
1215     return GSS_S_COMPLETE;
1216 
1217 failure:
1218 
1219     gss_release_iov_buffer(&junk, iov, iov_count);
1220 
1221     return major_status;
1222 }
1223 
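/*
 * IOV variant of _gssapi_unwrap_arcfour(): the token header is parsed
 * from the HEADER buffer and the DATA (and PADDING) buffers are
 * decrypted in place, after which the signature, padding and message
 * order are verified.
 */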
1224 OM_uint32
1225 _gssapi_unwrap_iov_arcfour(OM_uint32 *minor_status,
1226 			   gsskrb5_ctx ctx,
1227 			   krb5_context context,
1228 			   int *pconf_state,
1229 			   gss_qop_t *pqop_state,
1230 			   gss_iov_buffer_desc *iov,
1231 			   int iov_count,
1232 			   krb5_keyblock *key)
1233 {
1234     OM_uint32 major_status;
1235     gss_iov_buffer_desc *header, *padding, *trailer;
1236     krb5_keyblock Klocal;
1237     uint8_t Klocaldata[16];
1238     uint8_t k6_data[16], snd_seq[8], Confounder[8];
1239     uint8_t cksum_data[8];
1240     uint8_t *_p = NULL;
1241     const uint8_t *p, *p0;
1242     size_t verify_len = 0;
1243     uint32_t seq_number;
1244     size_t hlen = 0;
1245     int conf_state;
1246     int cmp;
1247     size_t i;
1248     krb5_error_code kret;
1249     OM_uint32 ret;
1250 
1251     if (pconf_state != NULL) {
1252 	*pconf_state = 0;
1253     }
1254     if (pqop_state != NULL) {
1255 	*pqop_state = 0;
1256     }
1257 
1258     header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
1259     padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
1260     trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);
1261 
1262     /* Check if the packet is correct */
1263     major_status = _gk_verify_buffers(minor_status,
1264 				  ctx,
1265 				  header,
1266 				  padding,
1267 				  trailer);
1268     if (major_status != GSS_S_COMPLETE) {
1269 	return major_status;
1270     }
1271 
1272     if (padding != NULL && padding->buffer.length != 1) {
1273 	*minor_status = EINVAL;
1274 	return GSS_S_FAILURE;
1275     }
1276 
1277     if (IS_DCE_STYLE(ctx)) {
1278 	verify_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE +
1279 		     GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE;
1280 	if (header->buffer.length > verify_len) {
1281 	    return GSS_S_BAD_MECH;
1282 	}
1283     } else {
1284 	verify_len = header->buffer.length;
1285     }
1286     _p = header->buffer.value;
1287 
1288     ret = _gssapi_verify_mech_header(&_p,
1289 				     verify_len,
1290 				     GSS_KRB5_MECHANISM);
1291     if (ret) {
1292 	return ret;
1293     }
1294     p0 = _p;
1295 
1296     /* length of mech header */
1297     hlen = (p0 - (uint8_t *)header->buffer.value);
1298     hlen += GSS_ARCFOUR_WRAP_TOKEN_SIZE;
1299 
1300     if (hlen > header->buffer.length) {
1301 	return GSS_S_BAD_MECH;
1302     }
1303 
1304     p = p0;
1305 
1306     if (memcmp(p, "\x02\x01", 2) != 0)
1307 	return GSS_S_BAD_SIG;
1308     p += 2;
1309     if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
1310 	return GSS_S_BAD_SIG;
1311     p += 2;
1312 
1313     if (memcmp (p, "\x10\x00", 2) == 0)
1314 	conf_state = 1;
1315     else if (memcmp (p, "\xff\xff", 2) == 0)
1316 	conf_state = 0;
1317     else
1318 	return GSS_S_BAD_SIG;
1319 
1320     p += 2;
1321     if (memcmp (p, "\xff\xff", 2) != 0)
1322 	return GSS_S_BAD_MIC;
1323     p = NULL;
1324 
1325     kret = arcfour_mic_key(context,
1326 			   key,
1327 			   p0 + 16, /* SGN_CKSUM */
1328 			   8,       /* SGN_CKSUM_LEN */
1329 			   k6_data,
1330 			   sizeof(k6_data));
1331     if (kret) {
1332 	*minor_status = kret;
1333 	return GSS_S_FAILURE;
1334     }
1335 
1336     {
1337 	EVP_CIPHER_CTX *rc4_key;
1338 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1339 	EVP_CIPHER_CTX rc4_keys;
1340 	rc4_key = &rc4_keys;
1341 	EVP_CIPHER_CTX_init(rc4_key);
1342 #else
1343 	rc4_key = EVP_CIPHER_CTX_new();
1344 #endif
1345 
1347 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1348 	EVP_Cipher(rc4_key, snd_seq, p0 + 8, 8); /* SND_SEQ */
1349 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1350 	EVP_CIPHER_CTX_cleanup(rc4_key);
1351 #else
1352 	EVP_CIPHER_CTX_free(rc4_key);
1353 #endif
1354 
1355 	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
1356     }
1357 
1358     _gsskrb5_decode_be_om_uint32(snd_seq, &seq_number);
1359 
1360     if (ctx->more_flags & LOCAL) {
1361 	cmp = memcmp(&snd_seq[4], "\xff\xff\xff\xff", 4);
1362     } else {
1363 	cmp = memcmp(&snd_seq[4], "\x00\x00\x00\x00", 4);
1364     }
1365     if (cmp != 0) {
1366 	*minor_status = 0;
1367 	return GSS_S_BAD_MIC;
1368     }
1379 
1380     /* keyblock */
1381     Klocal.keytype = key->keytype;
1382     Klocal.keyvalue.data = Klocaldata;
1383     Klocal.keyvalue.length = sizeof(Klocaldata);
1384 
1385     for (i = 0; i < 16; i++) {
1386 	Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
1387     }
1388 
1389     kret = arcfour_mic_key(context,
1390 			   &Klocal,
1391 			   snd_seq,
1392 			   4,
1393 			   k6_data, sizeof(k6_data));
1394     memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
1395     if (kret) {
1396 	*minor_status = kret;
1397 	return GSS_S_FAILURE;
1398     }
1399 
1400     if (conf_state == 1) {
1401 	EVP_CIPHER_CTX *rc4_key;
1402 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1403 	EVP_CIPHER_CTX rc4_keys;
1404 	rc4_key = &rc4_keys;
1405 	EVP_CIPHER_CTX_init(rc4_key);
1406 #else
1407 	rc4_key = EVP_CIPHER_CTX_new();
1408 #endif
1409 
1410 	EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1411 
1412 	/* Confounder */
1413 	EVP_Cipher(rc4_key, Confounder, p0 + 24, 8);
1414 
1415 	/* Data */
1416 	for (i = 0; i < iov_count; i++) {
1417 	    switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
1418 	    case GSS_IOV_BUFFER_TYPE_DATA:
1419 		break;
1420 	    default:
1421 		continue;
1422 	    }
1423 
1424 	    EVP_Cipher(rc4_key, iov[i].buffer.value,
1425 		       iov[i].buffer.value, iov[i].buffer.length);
1426 	}
1427 
1428 	/* Padding */
1429 	if (padding) {
1430 	    EVP_Cipher(rc4_key, padding->buffer.value,
1431 		       padding->buffer.value, padding->buffer.length);
1432 	}
1433 
1434 #if OPENSSL_VERSION_NUMBER < 0x10100000UL
1435 	EVP_CIPHER_CTX_cleanup(rc4_key);
1436 #else
1437 	EVP_CIPHER_CTX_free(rc4_key);
1438 #endif
1439     } else {
1440 	/* Confounder */
1441 	memcpy(Confounder, p0 + 24, 8);
1442     }
1443     memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
1444 
1445     /* Prepare the buffer for signing */
1446     kret = arcfour_mic_cksum_iov(context,
1447 				 key, KRB5_KU_USAGE_SEAL,
1448 				 cksum_data, sizeof(cksum_data),
1449 				 p0, 8,
1450 				 Confounder, sizeof(Confounder),
1451 				 iov, iov_count,
1452 				 padding);
1453     if (kret) {
1454 	*minor_status = kret;
1455 	return GSS_S_FAILURE;
1456     }
1457 
1458     cmp = ct_memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM */
1459     if (cmp != 0) {
1460 	*minor_status = 0;
1461 	return GSS_S_BAD_MIC;
1462     }
1463 
1464     if (padding) {
1465 	size_t plen;
1466 
1467 	ret = _gssapi_verify_pad(&padding->buffer, 1, &plen);
1468 	if (ret) {
1469 	    *minor_status = 0;
1470 	    return ret;
1471 	}
1472     }
1473 
1474     HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
1475     ret = _gssapi_msg_order_check(ctx->order, seq_number);
1476     HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);
1477     if (ret != 0) {
1478 	return ret;
1479     }
1480 
1481     if (pconf_state) {
1482 	*pconf_state = conf_state;
1483     }
1484 
1485     *minor_status = 0;
1486     return GSS_S_COMPLETE;
1487 }
1488