xref: /netbsd-src/crypto/external/bsd/heimdal/dist/lib/gssapi/krb5/arcfour.c (revision e89934bbf778a6d6d6894877c4da59d0c7835b0f)
1 /*	$NetBSD: arcfour.c,v 1.2 2017/01/28 21:31:46 christos Exp $	*/
2 
3 /*
4  * Copyright (c) 2003 - 2006 Kungliga Tekniska Högskolan
5  * (Royal Institute of Technology, Stockholm, Sweden).
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  *
12  * 1. Redistributions of source code must retain the above copyright
13  *    notice, this list of conditions and the following disclaimer.
14  *
15  * 2. Redistributions in binary form must reproduce the above copyright
16  *    notice, this list of conditions and the following disclaimer in the
17  *    documentation and/or other materials provided with the distribution.
18  *
19  * 3. Neither the name of the Institute nor the names of its contributors
20  *    may be used to endorse or promote products derived from this software
21  *    without specific prior written permission.
22  *
23  * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
24  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
25  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
26  * ARE DISCLAIMED.  IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
27  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
28  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
29  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
30  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
31  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
32  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
33  * SUCH DAMAGE.
34  */
35 
36 #include "gsskrb5_locl.h"
37 
38 /*
39  * Implements draft-brezak-win2k-krb-rc4-hmac-04.txt
40  *
41  * The arcfour message have the following formats:
42  *
43  * MIC token
44  * 	TOK_ID[2] = 01 01
45  *	SGN_ALG[2] = 11 00
46  *	Filler[4]
47  *	SND_SEQ[8]
48  *	SGN_CKSUM[8]
49  *
50  * WRAP token
51  *	TOK_ID[2] = 02 01
52  *	SGN_ALG[2];
53  *	SEAL_ALG[2]
54  *	Filler[2]
 55  *	SND_SEQ[8]
56  *	SGN_CKSUM[8]
57  *	Confounder[8]
58  */
59 
60 /*
61  * WRAP in DCE-style have a fixed size header, the oid and length over
62  * the WRAP header is a total of
63  * GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE +
64  * GSS_ARCFOUR_WRAP_TOKEN_SIZE byte (ie total of 45 bytes overhead,
65  * remember the 2 bytes from APPL [0] SEQ).
66  */
67 
68 #define GSS_ARCFOUR_WRAP_TOKEN_SIZE 32
69 #define GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE 13
70 
71 
/*
 * Derive an RC4 key ("K6") from the context key and a checksum:
 *
 *   K5 = HMAC-MD5(key, T)          where T is 4 zero bytes
 *   K6 = HMAC-MD5(K5, cksum_data)
 *
 * For the exportable 56-bit enctype the first HMAC is instead taken
 * over the salt "fortybits" followed by T, and K5 is truncated to 40
 * bits by overwriting bytes 7..15 with 0xAB.
 *
 * The derived key is written to key6_data (key6_size bytes, 16 for
 * RC4).  Returns 0 on success or a krb5 error code.
 */
static krb5_error_code
arcfour_mic_key(krb5_context context, krb5_keyblock *key,
		const void *cksum_data, size_t cksum_size,
		void *key6_data, size_t key6_size)
{
    krb5_error_code ret;

    Checksum cksum_k5;
    krb5_keyblock key5;
    char k5_data[16];

    Checksum cksum_k6;

    char T[4];

    memset(T, 0, 4);
    cksum_k5.checksum.data = k5_data;
    cksum_k5.checksum.length = sizeof(k5_data);

    if (key->keytype == KRB5_ENCTYPE_ARCFOUR_HMAC_MD5_56) {
	/* "fortybits" occupies bytes 0..9 (including NUL); T fills 10..13 */
	char L40[14] = "fortybits";

	memcpy(L40 + 10, T, sizeof(T));
	ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
			L40, 14, 0, key, &cksum_k5);
	/* truncate K5 to 40 bits: bytes 7..15 become 0xAB */
	memset(&k5_data[7], 0xAB, 9);
    } else {
	ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
			T, 4, 0, key, &cksum_k5);
    }
    if (ret)
	return ret;

    /* second HMAC, keyed with K5, over the supplied checksum */
    key5.keytype = KRB5_ENCTYPE_ARCFOUR_HMAC_MD5;
    key5.keyvalue = cksum_k5.checksum;

    cksum_k6.checksum.data = key6_data;
    cksum_k6.checksum.length = key6_size;

    return krb5_hmac(context, CKSUMTYPE_RSA_MD5,
		     cksum_data, cksum_size, 0, &key5, &cksum_k6);
}
114 
115 
/*
 * Compute the 8-byte SGN_CKSUM for a token: a krb5 checksum (RSA-MD5
 * based, under the given key usage) over the concatenation of v1, v2,
 * all DATA/SIGN_ONLY iov buffers and, when present, the padding
 * buffer.  Only the first sgn_cksum_sz (must be 8) bytes of the
 * checksum are copied to sgn_cksum.  Returns 0 on success or a
 * krb5/errno error code.
 */
static krb5_error_code
arcfour_mic_cksum_iov(krb5_context context,
		      krb5_keyblock *key, unsigned usage,
		      u_char *sgn_cksum, size_t sgn_cksum_sz,
		      const u_char *v1, size_t l1,
		      const void *v2, size_t l2,
		      const gss_iov_buffer_desc *iov,
		      int iov_count,
		      const gss_iov_buffer_desc *padding)
{
    Checksum CKSUM;
    u_char *ptr;
    size_t len;
    size_t ofs = 0;
    int i;
    krb5_crypto crypto;
    krb5_error_code ret;

    assert(sgn_cksum_sz == 8);

    /* total length of all the pieces to be checksummed */
    len = l1 + l2;

    for (i=0; i < iov_count; i++) {
	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	case GSS_IOV_BUFFER_TYPE_DATA:
	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
	    break;
	default:
	    continue;
	}

	len += iov[i].buffer.length;
    }

    if (padding) {
	len += padding->buffer.length;
    }

    /* flatten everything into one contiguous buffer */
    ptr = malloc(len);
    if (ptr == NULL)
	return ENOMEM;

    memcpy(ptr + ofs, v1, l1);
    ofs += l1;
    memcpy(ptr + ofs, v2, l2);
    ofs += l2;

    for (i=0; i < iov_count; i++) {
	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	case GSS_IOV_BUFFER_TYPE_DATA:
	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
	    break;
	default:
	    continue;
	}

	memcpy(ptr + ofs,
	       iov[i].buffer.value,
	       iov[i].buffer.length);
	ofs += iov[i].buffer.length;
    }

    if (padding) {
	memcpy(ptr + ofs,
	       padding->buffer.value,
	       padding->buffer.length);
	ofs += padding->buffer.length;
    }

    ret = krb5_crypto_init(context, key, 0, &crypto);
    if (ret) {
	free(ptr);
	return ret;
    }

    ret = krb5_create_checksum(context,
			       crypto,
			       usage,
			       0,
			       ptr, len,
			       &CKSUM);
    /* the buffer may hold plaintext; scrub before freeing */
    memset(ptr, 0, len);
    free(ptr);
    if (ret == 0) {
	/* truncate the checksum to the 8-byte SGN_CKSUM */
	memcpy(sgn_cksum, CKSUM.checksum.data, sgn_cksum_sz);
	free_Checksum(&CKSUM);
    }
    krb5_crypto_destroy(context, crypto);

    return ret;
}
207 
208 static krb5_error_code
209 arcfour_mic_cksum(krb5_context context,
210 		  krb5_keyblock *key, unsigned usage,
211 		  u_char *sgn_cksum, size_t sgn_cksum_sz,
212 		  const u_char *v1, size_t l1,
213 		  const void *v2, size_t l2,
214 		  const void *v3, size_t l3)
215 {
216     gss_iov_buffer_desc iov;
217 
218     iov.type = GSS_IOV_BUFFER_TYPE_SIGN_ONLY;
219     iov.buffer.value = rk_UNCONST(v3);
220     iov.buffer.length = l3;
221 
222     return arcfour_mic_cksum_iov(context, key, usage,
223 				 sgn_cksum, sgn_cksum_sz,
224 				 v1, l1, v2, l2,
225 				 &iov, 1, NULL);
226 }
227 
228 
/*
 * Create a MIC token (TOK_ID 01 01) over message_buffer, per the
 * format described at the top of this file.  Layout relative to p0
 * (the byte after the mech header): 0 TOK_ID, 2 SGN_ALG, 4 Filler,
 * 8 SND_SEQ (RC4 encrypted), 16 SGN_CKSUM.  message_token is
 * allocated here and owned by the caller.
 */
OM_uint32
_gssapi_get_mic_arcfour(OM_uint32 * minor_status,
			const gsskrb5_ctx context_handle,
			krb5_context context,
			gss_qop_t qop_req,
			const gss_buffer_t message_buffer,
			gss_buffer_t message_token,
			krb5_keyblock *key)
{
    krb5_error_code ret;
    int32_t seq_number;
    size_t len, total_len;
    u_char k6_data[16], *p0, *p;
    EVP_CIPHER_CTX rc4_key;

    /* 22 payload bytes; NOTE(review): the encapsulation presumably
     * accounts for the 2-byte TOK_ID, giving the 24 bytes written below */
    _gsskrb5_encap_length (22, &len, &total_len, GSS_KRB5_MECHANISM);

    message_token->length = total_len;
    message_token->value  = malloc (total_len);
    if (message_token->value == NULL) {
	*minor_status = ENOMEM;
	return GSS_S_FAILURE;
    }

    p0 = _gssapi_make_mech_header(message_token->value,
				  len,
				  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x01; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG = HMAC MD5 ARCFOUR */
    *p++ = 0x00;
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;
    *p++ = 0xff;
    *p++ = 0xff;

    p = NULL;

    /* SGN_CKSUM at p0+16: over the 8 token header bytes + message */
    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SIGN,
			    p0 + 16, 8,  /* SGN_CKSUM */
			    p0, 8, /* TOK_ID, SGN_ALG, Filler */
			    message_buffer->value, message_buffer->length,
			    NULL, 0);
    if (ret) {
	_gsskrb5_release_buffer(minor_status, message_token);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    /* K6, derived from the checksum, encrypts SND_SEQ below */
    ret = arcfour_mic_key(context, key,
			  p0 + 16, 8, /* SGN_CKSUM */
			  k6_data, sizeof(k6_data));
    if (ret) {
	_gsskrb5_release_buffer(minor_status, message_token);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber (context,
				     context_handle->auth_context,
				     &seq_number);
    p = p0 + 8; /* SND_SEQ */
    _gsskrb5_encode_be_om_uint32(seq_number, p);

    krb5_auth_con_setlocalseqnumber (context,
				     context_handle->auth_context,
				     ++seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);

    /* direction indicator: 0x00 when this side has LOCAL set, 0xff otherwise */
    memset (p + 4, (context_handle->more_flags & LOCAL) ? 0 : 0xff, 4);

    /* encrypt SND_SEQ in place with RC4 under K6 */
    EVP_CIPHER_CTX_init(&rc4_key);
    EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
    EVP_Cipher(&rc4_key, p, p, 8);
    EVP_CIPHER_CTX_cleanup(&rc4_key);

    memset(k6_data, 0, sizeof(k6_data));

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
314 
315 
/*
 * Verify a MIC token produced by _gssapi_get_mic_arcfour():
 * recompute SGN_CKSUM over the token header and the message, compare
 * it (constant time) against the token's checksum, decrypt SND_SEQ,
 * check the direction indicator and enforce message order.  "type"
 * is the expected token identifier passed to _gsskrb5_verify_header().
 */
OM_uint32
_gssapi_verify_mic_arcfour(OM_uint32 * minor_status,
			   const gsskrb5_ctx context_handle,
			   krb5_context context,
			   const gss_buffer_t message_buffer,
			   const gss_buffer_t token_buffer,
			   gss_qop_t * qop_state,
			   krb5_keyblock *key,
			   const char *type)
{
    krb5_error_code ret;
    uint32_t seq_number;
    OM_uint32 omret;
    u_char SND_SEQ[8], cksum_data[8], *p;
    char k6_data[16];
    int cmp;

    if (qop_state)
	*qop_state = 0;

    p = token_buffer->value;
    /* validates the mech header and TOK_ID, advancing p past them */
    omret = _gsskrb5_verify_header (&p,
				       token_buffer->length,
				       type,
				       GSS_KRB5_MECHANISM);
    if (omret)
	return omret;

    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
	return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp (p, "\xff\xff\xff\xff", 4) != 0) /* Filler */
	return GSS_S_BAD_MIC;
    p += 4;

    /* p now points at SND_SEQ; p - 8 is the start of the 8 header
     * bytes (TOK_ID, SGN_ALG, Filler) covered by the checksum */
    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SIGN,
			    cksum_data, sizeof(cksum_data),
			    p - 8, 8,
			    message_buffer->value, message_buffer->length,
			    NULL, 0);
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    /* K6, derived from the recomputed checksum, decrypts SND_SEQ */
    ret = arcfour_mic_key(context, key,
			  cksum_data, sizeof(cksum_data),
			  k6_data, sizeof(k6_data));
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    /* constant-time compare against SGN_CKSUM at p + 8 */
    cmp = ct_memcmp(cksum_data, p + 8, 8);
    if (cmp) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    {
	EVP_CIPHER_CTX rc4_key;

	EVP_CIPHER_CTX_init(&rc4_key);
	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, (void *)k6_data, NULL, 0);
	EVP_Cipher(&rc4_key, SND_SEQ, p, 8);
	EVP_CIPHER_CTX_cleanup(&rc4_key);

	memset(k6_data, 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);

    /* the direction indicator must be the peer's (opposite of ours) */
    if (context_handle->more_flags & LOCAL)
	cmp = memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4);
    else
	cmp = memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4);

    memset(SND_SEQ, 0, sizeof(SND_SEQ));
    if (cmp != 0) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    /* replay / out-of-order detection */
    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    omret = _gssapi_msg_order_check(context_handle->order, seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
    if (omret)
	return omret;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
409 
/*
 * Create a WRAP token (TOK_ID 02 01) around input_message_buffer,
 * sealing (RC4 encrypting) confounder + payload when conf_req_flag
 * is set.  Layout relative to p0 (the byte after the mech header):
 *   0 TOK_ID, 2 SGN_ALG, 4 SEAL_ALG, 6 Filler, 8 SND_SEQ (encrypted),
 *   16 SGN_CKSUM, 24 Confounder, 32 payload.
 * In DCE style no pad byte is appended and the payload is not counted
 * in the encapsulation length.  output_message_buffer is allocated
 * here and owned by the caller.
 */
OM_uint32
_gssapi_wrap_arcfour(OM_uint32 * minor_status,
		     const gsskrb5_ctx context_handle,
		     krb5_context context,
		     int conf_req_flag,
		     gss_qop_t qop_req,
		     const gss_buffer_t input_message_buffer,
		     int * conf_state,
		     gss_buffer_t output_message_buffer,
		     krb5_keyblock *key)
{
    u_char Klocaldata[16], k6_data[16], *p, *p0;
    size_t len, total_len, datalen;
    krb5_keyblock Klocal;
    krb5_error_code ret;
    int32_t seq_number;

    if (conf_state)
	*conf_state = 0;

    datalen = input_message_buffer->length;

    if (IS_DCE_STYLE(context_handle)) {
	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
	total_len += datalen;
    } else {
	datalen += 1; /* padding */
	len = datalen + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
    }

    output_message_buffer->length = total_len;
    output_message_buffer->value  = malloc (total_len);
    if (output_message_buffer->value == NULL) {
	*minor_status = ENOMEM;
	return GSS_S_FAILURE;
    }

    p0 = _gssapi_make_mech_header(output_message_buffer->value,
				  len,
				  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x02; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG = HMAC MD5 ARCFOUR */
    *p++ = 0x00;
    if (conf_req_flag) {
	*p++ = 0x10; /* SEAL_ALG = RC4 */
	*p++ = 0x00;
    } else {
	*p++ = 0xff; /* SEAL_ALG = none */
	*p++ = 0xff;
    }
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;

    p = NULL;

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber (context,
				     context_handle->auth_context,
				     &seq_number);

    /* plaintext SND_SEQ: 32-bit big-endian sequence number... */
    _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);

    krb5_auth_con_setlocalseqnumber (context,
				     context_handle->auth_context,
				     ++seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);

    /* ...followed by the direction indicator: 0x00 when LOCAL, 0xff otherwise */
    memset (p0 + 8 + 4,
	    (context_handle->more_flags & LOCAL) ? 0 : 0xff,
	    4);

    krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */

    /* p points to data */
    p = p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
    memcpy(p, input_message_buffer->value, input_message_buffer->length);

    if (!IS_DCE_STYLE(context_handle))
	p[input_message_buffer->length] = 1; /* padding */

    /* SGN_CKSUM over token header, confounder and (padded) payload */
    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SEAL,
			    p0 + 16, 8, /* SGN_CKSUM */
			    p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
			    p0 + 24, 8, /* Confounder */
			    p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
			    datalen);
    if (ret) {
	*minor_status = ret;
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	return GSS_S_FAILURE;
    }

    {
	int i;

	/* Klocal = context key XOR 0xF0; with the plaintext sequence
	 * number it derives the key that seals confounder + payload */
	Klocal.keytype = key->keytype;
	Klocal.keyvalue.data = Klocaldata;
	Klocal.keyvalue.length = sizeof(Klocaldata);

	for (i = 0; i < 16; i++)
	    Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    ret = arcfour_mic_key(context, &Klocal,
			  p0 + 8, 4, /* SND_SEQ */
			  k6_data, sizeof(k6_data));
    memset(Klocaldata, 0, sizeof(Klocaldata));
    if (ret) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }


    if(conf_req_flag) {
	EVP_CIPHER_CTX rc4_key;

	/* seal confounder + payload in place with one RC4 stream */
	EVP_CIPHER_CTX_init(&rc4_key);
	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
	EVP_Cipher(&rc4_key, p0 + 24, p0 + 24, 8 + datalen);
	EVP_CIPHER_CTX_cleanup(&rc4_key);
    }
    memset(k6_data, 0, sizeof(k6_data));

    /* derive a fresh K6 from the checksum to encrypt SND_SEQ */
    ret = arcfour_mic_key(context, key,
			  p0 + 16, 8, /* SGN_CKSUM */
			  k6_data, sizeof(k6_data));
    if (ret) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    {
	EVP_CIPHER_CTX rc4_key;

	EVP_CIPHER_CTX_init(&rc4_key);
	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
	EVP_Cipher(&rc4_key, p0 + 8, p0 + 8 /* SND_SEQ */, 8);
	EVP_CIPHER_CTX_cleanup(&rc4_key);
	memset(k6_data, 0, sizeof(k6_data));
    }

    if (conf_state)
	*conf_state = conf_req_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
564 
/*
 * Unwrap a WRAP token produced by _gssapi_wrap_arcfour(): verify the
 * token header, decrypt SND_SEQ, check the direction indicator,
 * unseal confounder + payload when SEAL_ALG says so, strip the pad
 * byte(s) (non-DCE only), verify SGN_CKSUM in constant time and
 * enforce message order.  On success output_message_buffer holds a
 * freshly allocated copy of the plaintext, owned by the caller.
 */
OM_uint32 _gssapi_unwrap_arcfour(OM_uint32 *minor_status,
				 const gsskrb5_ctx context_handle,
				 krb5_context context,
				 const gss_buffer_t input_message_buffer,
				 gss_buffer_t output_message_buffer,
				 int *conf_state,
				 gss_qop_t *qop_state,
				 krb5_keyblock *key)
{
    u_char Klocaldata[16];
    krb5_keyblock Klocal;
    krb5_error_code ret;
    uint32_t seq_number;
    size_t datalen;
    OM_uint32 omret;
    u_char k6_data[16], SND_SEQ[8], Confounder[8];
    u_char cksum_data[8];
    u_char *p, *p0;
    int cmp;
    int conf_flag;
    size_t padlen = 0, len;

    if (conf_state)
	*conf_state = 0;
    if (qop_state)
	*qop_state = 0;

    p0 = input_message_buffer->value;

    if (IS_DCE_STYLE(context_handle)) {
	/* DCE style: the header only describes the fixed-size token */
	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE +
	    GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE;
	if (input_message_buffer->length < len)
	    return GSS_S_BAD_MECH;
    } else {
	len = input_message_buffer->length;
    }

    omret = _gssapi_verify_mech_header(&p0,
				       len,
				       GSS_KRB5_MECHANISM);
    if (omret)
	return omret;

    /* length of mech header */
    len = (p0 - (u_char *)input_message_buffer->value) +
	GSS_ARCFOUR_WRAP_TOKEN_SIZE;

    if (len > input_message_buffer->length)
	return GSS_S_BAD_MECH;

    /* length of data */
    datalen = input_message_buffer->length - len;

    p = p0;

    if (memcmp(p, "\x02\x01", 2) != 0) /* TOK_ID */
	return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
	return GSS_S_BAD_SIG;
    p += 2;

    /* SEAL_ALG: 10 00 = RC4 sealed, ff ff = integrity only */
    if (memcmp (p, "\x10\x00", 2) == 0)
	conf_flag = 1;
    else if (memcmp (p, "\xff\xff", 2) == 0)
	conf_flag = 0;
    else
	return GSS_S_BAD_SIG;

    p += 2;
    if (memcmp (p, "\xff\xff", 2) != 0) /* Filler */
	return GSS_S_BAD_MIC;
    p = NULL;

    /* K6, derived from the token's SGN_CKSUM, decrypts SND_SEQ */
    ret = arcfour_mic_key(context, key,
			  p0 + 16, 8, /* SGN_CKSUM */
			  k6_data, sizeof(k6_data));
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    {
	EVP_CIPHER_CTX rc4_key;

	EVP_CIPHER_CTX_init(&rc4_key);
	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
	EVP_Cipher(&rc4_key, SND_SEQ, p0 + 8, 8);
	EVP_CIPHER_CTX_cleanup(&rc4_key);
	memset(k6_data, 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);

    /* the direction indicator must be the peer's (opposite of ours) */
    if (context_handle->more_flags & LOCAL)
	cmp = memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4);
    else
	cmp = memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4);

    if (cmp != 0) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    {
	int i;

	/* Klocal = context key XOR 0xF0; with the first 4 bytes of the
	 * decrypted SND_SEQ it derives the unseal key */
	Klocal.keytype = key->keytype;
	Klocal.keyvalue.data = Klocaldata;
	Klocal.keyvalue.length = sizeof(Klocaldata);

	for (i = 0; i < 16; i++)
	    Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    ret = arcfour_mic_key(context, &Klocal,
			  SND_SEQ, 4,
			  k6_data, sizeof(k6_data));
    memset(Klocaldata, 0, sizeof(Klocaldata));
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    output_message_buffer->value = malloc(datalen);
    if (output_message_buffer->value == NULL) {
	*minor_status = ENOMEM;
	return GSS_S_FAILURE;
    }
    output_message_buffer->length = datalen;

    if(conf_flag) {
	EVP_CIPHER_CTX rc4_key;

	/* confounder and payload are decrypted with the same RC4 stream */
	EVP_CIPHER_CTX_init(&rc4_key);
	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
	EVP_Cipher(&rc4_key, Confounder, p0 + 24, 8);
	EVP_Cipher(&rc4_key, output_message_buffer->value, p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE, datalen);
	EVP_CIPHER_CTX_cleanup(&rc4_key);
    } else {
	memcpy(Confounder, p0 + 24, 8); /* Confounder */
	memcpy(output_message_buffer->value,
	       p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
	       datalen);
    }
    memset(k6_data, 0, sizeof(k6_data));

    if (!IS_DCE_STYLE(context_handle)) {
	/* strip the pad byte(s) appended by wrap */
	ret = _gssapi_verify_pad(output_message_buffer, datalen, &padlen);
	if (ret) {
	    _gsskrb5_release_buffer(minor_status, output_message_buffer);
	    *minor_status = 0;
	    return ret;
	}
	output_message_buffer->length -= padlen;
    }

    /* recompute SGN_CKSUM over header, confounder and padded plaintext */
    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SEAL,
			    cksum_data, sizeof(cksum_data),
			    p0, 8,
			    Confounder, sizeof(Confounder),
			    output_message_buffer->value,
			    output_message_buffer->length + padlen);
    if (ret) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    cmp = ct_memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM, constant time */
    if (cmp) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    /* replay / out-of-order detection */
    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    omret = _gssapi_msg_order_check(context_handle->order, seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
    if (omret)
	return omret;

    if (conf_state)
	*conf_state = conf_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
754 
/*
 * Compute the largest message that, together with the arcfour WRAP
 * overhead, still fits in input_length bytes.  The crypto handle is
 * unused for arcfour; the overhead depends only on the fixed token
 * size.
 */
static OM_uint32
max_wrap_length_arcfour(const gsskrb5_ctx ctx,
			krb5_crypto crypto,
			size_t input_length,
			OM_uint32 *max_input_size)
{
    /*
     * If GSS_C_DCE_STYLE is in use we only need to encapsulate the
     * WRAP token; since the token has a fixed size, the overhead is a
     * constant.
     */
    if (IS_DCE_STYLE(ctx)) {
	size_t len, total_len;

	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);

	if (input_length < len)
	    *max_input_size = 0;
	else
	    *max_input_size = input_length - len;

    } else {
	size_t extrasize = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	size_t blocksize = 8;
	size_t len, total_len;

	/* overestimate: confounder + payload + one pad block + token */
	len = 8 + input_length + blocksize + extrasize;

	_gsskrb5_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);

	total_len -= input_length; /* token length */
	if (total_len < input_length) {
	    *max_input_size = (input_length - total_len);
	    /* round down to a multiple of the blocksize */
	    (*max_input_size) &= (~(OM_uint32)(blocksize - 1));
	} else {
	    *max_input_size = 0;
	}
    }

    return GSS_S_COMPLETE;
}
797 
798 OM_uint32
799 _gssapi_wrap_size_arcfour(OM_uint32 *minor_status,
800 			  const gsskrb5_ctx ctx,
801 			  krb5_context context,
802 			  int conf_req_flag,
803 			  gss_qop_t qop_req,
804 			  OM_uint32 req_output_size,
805 			  OM_uint32 *max_input_size,
806 			  krb5_keyblock *key)
807 {
808     krb5_error_code ret;
809     krb5_crypto crypto;
810 
811     ret = krb5_crypto_init(context, key, 0, &crypto);
812     if (ret != 0) {
813 	*minor_status = ret;
814 	return GSS_S_FAILURE;
815     }
816 
817     ret = max_wrap_length_arcfour(ctx, crypto,
818 				  req_output_size, max_input_size);
819     if (ret != 0) {
820 	*minor_status = ret;
821 	krb5_crypto_destroy(context, crypto);
822 	return GSS_S_FAILURE;
823     }
824 
825     krb5_crypto_destroy(context, crypto);
826 
827     return GSS_S_COMPLETE;
828 }
829 
830 OM_uint32
831 _gssapi_wrap_iov_length_arcfour(OM_uint32 *minor_status,
832 				gsskrb5_ctx ctx,
833 				krb5_context context,
834 				int conf_req_flag,
835 				gss_qop_t qop_req,
836 				int *conf_state,
837 				gss_iov_buffer_desc *iov,
838 				int iov_count)
839 {
840     OM_uint32 major_status;
841     size_t data_len = 0;
842     int i;
843     gss_iov_buffer_desc *header = NULL;
844     gss_iov_buffer_desc *padding = NULL;
845     gss_iov_buffer_desc *trailer = NULL;
846 
847     *minor_status = 0;
848 
849     for (i = 0; i < iov_count; i++) {
850 	switch(GSS_IOV_BUFFER_TYPE(iov[i].type)) {
851 	case GSS_IOV_BUFFER_TYPE_EMPTY:
852 	    break;
853 	case GSS_IOV_BUFFER_TYPE_DATA:
854 	    data_len += iov[i].buffer.length;
855 	    break;
856 	case GSS_IOV_BUFFER_TYPE_HEADER:
857 	    if (header != NULL) {
858 		*minor_status = EINVAL;
859 		return GSS_S_FAILURE;
860 	    }
861 	    header = &iov[i];
862 	    break;
863 	case GSS_IOV_BUFFER_TYPE_TRAILER:
864 	    if (trailer != NULL) {
865 		*minor_status = EINVAL;
866 		return GSS_S_FAILURE;
867 	    }
868 	    trailer = &iov[i];
869 	    break;
870 	case GSS_IOV_BUFFER_TYPE_PADDING:
871 	    if (padding != NULL) {
872 		*minor_status = EINVAL;
873 		return GSS_S_FAILURE;
874 	    }
875 	    padding = &iov[i];
876 	    break;
877 	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
878 	    break;
879 	default:
880 	    *minor_status = EINVAL;
881 	    return GSS_S_FAILURE;
882 	}
883     }
884 
885     major_status = _gk_verify_buffers(minor_status, ctx, header, padding, trailer);
886     if (major_status != GSS_S_COMPLETE) {
887 	    return major_status;
888     }
889 
890     if (IS_DCE_STYLE(ctx)) {
891 	size_t len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
892 	size_t total_len;
893 	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
894 	header->buffer.length = total_len;
895     } else {
896 	size_t len;
897 	size_t total_len;
898 	if (padding) {
899 	    data_len += 1; /* padding */
900 	}
901 	len = data_len + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
902 	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
903 	header->buffer.length = total_len - data_len;
904     }
905 
906     if (trailer) {
907 	trailer->buffer.length = 0;
908     }
909 
910     if (padding) {
911 	padding->buffer.length = 1;
912     }
913 
914     return GSS_S_COMPLETE;
915 }
916 
917 OM_uint32
918 _gssapi_wrap_iov_arcfour(OM_uint32 *minor_status,
919 			 gsskrb5_ctx ctx,
920 			 krb5_context context,
921 			 int conf_req_flag,
922 			 int *conf_state,
923 			 gss_iov_buffer_desc *iov,
924 			 int iov_count,
925 			 krb5_keyblock *key)
926 {
927     OM_uint32 major_status, junk;
928     gss_iov_buffer_desc *header, *padding, *trailer;
929     krb5_error_code kret;
930     int32_t seq_number;
931     u_char Klocaldata[16], k6_data[16], *p, *p0;
932     size_t make_len = 0;
933     size_t header_len = 0;
934     size_t data_len = 0;
935     krb5_keyblock Klocal;
936     int i;
937 
938     header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
939     padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
940     trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);
941 
942     major_status = _gk_verify_buffers(minor_status, ctx, header, padding, trailer);
943     if (major_status != GSS_S_COMPLETE) {
944 	return major_status;
945     }
946 
947     for (i = 0; i < iov_count; i++) {
948 	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
949 	case GSS_IOV_BUFFER_TYPE_DATA:
950 	    break;
951 	default:
952 	    continue;
953 	}
954 
955 	data_len += iov[i].buffer.length;
956     }
957 
958     if (padding) {
959 	data_len += 1;
960     }
961 
962     if (IS_DCE_STYLE(ctx)) {
963 	size_t unwrapped_len;
964 	unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
965 	_gssapi_encap_length(unwrapped_len,
966 			     &make_len,
967 			     &header_len,
968 			     GSS_KRB5_MECHANISM);
969     } else {
970 	size_t unwrapped_len;
971 	unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE + data_len;
972 	_gssapi_encap_length(unwrapped_len,
973 			     &make_len,
974 			     &header_len,
975 			     GSS_KRB5_MECHANISM);
976 	header_len -= data_len;
977     }
978 
979     if (GSS_IOV_BUFFER_FLAGS(header->type) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE) {
980 	major_status = _gk_allocate_buffer(minor_status, header,
981 					   header_len);
982 	if (major_status != GSS_S_COMPLETE)
983 	    goto failure;
984     } else if (header->buffer.length < header_len) {
985 	*minor_status = KRB5_BAD_MSIZE;
986 	major_status = GSS_S_FAILURE;
987 	goto failure;
988     } else {
989 	header->buffer.length = header_len;
990     }
991 
992     if (padding) {
993 	if (GSS_IOV_BUFFER_FLAGS(padding->type) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE) {
994 	    major_status = _gk_allocate_buffer(minor_status, padding, 1);
995 	    if (major_status != GSS_S_COMPLETE)
996 		goto failure;
997 	} else if (padding->buffer.length < 1) {
998 	    *minor_status = KRB5_BAD_MSIZE;
999 	    major_status = GSS_S_FAILURE;
1000 	    goto failure;
1001 	} else {
1002 	    padding->buffer.length = 1;
1003 	}
1004 	memset(padding->buffer.value, 1, 1);
1005     }
1006 
1007     if (trailer) {
1008 	trailer->buffer.length = 0;
1009 	trailer->buffer.value = NULL;
1010     }
1011 
1012     p0 = _gssapi_make_mech_header(header->buffer.value,
1013 				  make_len,
1014 				  GSS_KRB5_MECHANISM);
1015     p = p0;
1016 
1017     *p++ = 0x02; /* TOK_ID */
1018     *p++ = 0x01;
1019     *p++ = 0x11; /* SGN_ALG */
1020     *p++ = 0x00;
1021     if (conf_req_flag) {
1022 	*p++ = 0x10; /* SEAL_ALG */
1023 	*p++ = 0x00;
1024     } else {
1025 	*p++ = 0xff; /* SEAL_ALG */
1026 	*p++ = 0xff;
1027     }
1028     *p++ = 0xff; /* Filler */
1029     *p++ = 0xff;
1030 
1031     p = NULL;
1032 
1033     HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
1034     krb5_auth_con_getlocalseqnumber(context,
1035 				    ctx->auth_context,
1036 				    &seq_number);
1037     _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);
1038 
1039     krb5_auth_con_setlocalseqnumber(context,
1040 				    ctx->auth_context,
1041 				    ++seq_number);
1042     HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);
1043 
1044     memset(p0 + 8 + 4,
1045            (ctx->more_flags & LOCAL) ? 0 : 0xff,
1046            4);
1047 
1048     krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */
1049 
1050     /* Sign Data */
1051     kret = arcfour_mic_cksum_iov(context,
1052 				 key, KRB5_KU_USAGE_SEAL,
1053 				 p0 + 16, 8, /* SGN_CKSUM */
1054 				 p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
1055 				 p0 + 24, 8, /* Confounder */
1056 				 iov, iov_count, /* Data + SignOnly */
1057 				 padding); /* padding */
1058     if (kret) {
1059 	*minor_status = kret;
1060 	major_status = GSS_S_FAILURE;
1061 	goto failure;
1062     }
1063 
1064     Klocal.keytype = key->keytype;
1065     Klocal.keyvalue.data = Klocaldata;
1066     Klocal.keyvalue.length = sizeof(Klocaldata);
1067 
1068     for (i = 0; i < 16; i++) {
1069 	Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
1070     }
1071     kret = arcfour_mic_key(context, &Klocal,
1072 			   p0 + 8, 4, /* SND_SEQ */
1073 			   k6_data, sizeof(k6_data));
1074     memset(Klocaldata, 0, sizeof(Klocaldata));
1075     if (kret) {
1076 	*minor_status = kret;
1077 	major_status = GSS_S_FAILURE;
1078 	goto failure;
1079     }
1080 
1081     if (conf_req_flag) {
1082 	EVP_CIPHER_CTX rc4_key;
1083 
1084 	EVP_CIPHER_CTX_init(&rc4_key);
1085 	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1086 
1087 	/* Confounder */
1088 	EVP_Cipher(&rc4_key, p0 + 24, p0 + 24, 8);
1089 
1090 	/* Seal Data */
1091 	for (i=0; i < iov_count; i++) {
1092 	    switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
1093 	    case GSS_IOV_BUFFER_TYPE_DATA:
1094 		break;
1095 	    default:
1096 		continue;
1097 	    }
1098 
1099 	    EVP_Cipher(&rc4_key, iov[i].buffer.value,
1100 		       iov[i].buffer.value, iov[i].buffer.length);
1101 	}
1102 
1103 	/* Padding */
1104 	if (padding) {
1105 	    EVP_Cipher(&rc4_key, padding->buffer.value,
1106 		       padding->buffer.value, padding->buffer.length);
1107 	}
1108 
1109 	EVP_CIPHER_CTX_cleanup(&rc4_key);
1110     }
1111     memset(k6_data, 0, sizeof(k6_data));
1112 
1113     kret = arcfour_mic_key(context, key,
1114 			   p0 + 16, 8, /* SGN_CKSUM */
1115 			   k6_data, sizeof(k6_data));
1116     if (kret) {
1117 	*minor_status = kret;
1118 	major_status = GSS_S_FAILURE;
1119         return major_status;
1120     }
1121 
1122     {
1123 	EVP_CIPHER_CTX rc4_key;
1124 
1125 	EVP_CIPHER_CTX_init(&rc4_key);
1126 	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1127 	EVP_Cipher(&rc4_key, p0 + 8, p0 + 8, 8); /* SND_SEQ */
1128 	EVP_CIPHER_CTX_cleanup(&rc4_key);
1129 
1130 	memset(k6_data, 0, sizeof(k6_data));
1131     }
1132 
1133     if (conf_state)
1134 	*conf_state = conf_req_flag;
1135 
1136     *minor_status = 0;
1137     return GSS_S_COMPLETE;
1138 
1139 failure:
1140 
1141     gss_release_iov_buffer(&junk, iov, iov_count);
1142 
1143     return major_status;
1144 }
1145 
1146 OM_uint32
1147 _gssapi_unwrap_iov_arcfour(OM_uint32 *minor_status,
1148 			   gsskrb5_ctx ctx,
1149 			   krb5_context context,
1150 			   int *pconf_state,
1151 			   gss_qop_t *pqop_state,
1152 			   gss_iov_buffer_desc *iov,
1153 			   int iov_count,
1154 			   krb5_keyblock *key)
1155 {
1156     OM_uint32 major_status;
1157     gss_iov_buffer_desc *header, *padding, *trailer;
1158     krb5_keyblock Klocal;
1159     uint8_t Klocaldata[16];
1160     uint8_t k6_data[16], snd_seq[8], Confounder[8];
1161     uint8_t cksum_data[8];
1162     uint8_t *_p = NULL;
1163     const uint8_t *p, *p0;
1164     size_t verify_len = 0;
1165     uint32_t seq_number;
1166     size_t hlen = 0;
1167     int conf_state;
1168     int cmp;
1169     size_t i;
1170     krb5_error_code kret;
1171     OM_uint32 ret;
1172 
1173     if (pconf_state != NULL) {
1174 	*pconf_state = 0;
1175     }
1176     if (pqop_state != NULL) {
1177 	*pqop_state = 0;
1178     }
1179 
1180     header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
1181     padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
1182     trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);
1183 
1184     /* Check if the packet is correct */
1185     major_status = _gk_verify_buffers(minor_status,
1186 				  ctx,
1187 				  header,
1188 				  padding,
1189 				  trailer);
1190     if (major_status != GSS_S_COMPLETE) {
1191 	return major_status;
1192     }
1193 
1194     if (padding != NULL && padding->buffer.length != 1) {
1195 	*minor_status = EINVAL;
1196 	return GSS_S_FAILURE;
1197     }
1198 
1199     if (IS_DCE_STYLE(context)) {
1200 	verify_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE +
1201 		     GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE;
1202 	if (header->buffer.length > verify_len) {
1203 	    return GSS_S_BAD_MECH;
1204 	}
1205     } else {
1206 	verify_len = header->buffer.length;
1207     }
1208     _p = header->buffer.value;
1209 
1210     ret = _gssapi_verify_mech_header(&_p,
1211 				     verify_len,
1212 				     GSS_KRB5_MECHANISM);
1213     if (ret) {
1214 	return ret;
1215     }
1216     p0 = _p;
1217 
1218     /* length of mech header */
1219     hlen = (p0 - (uint8_t *)header->buffer.value);
1220     hlen += GSS_ARCFOUR_WRAP_TOKEN_SIZE;
1221 
1222     if (hlen > header->buffer.length) {
1223 	return GSS_S_BAD_MECH;
1224     }
1225 
1226     p = p0;
1227 
1228     if (memcmp(p, "\x02\x01", 2) != 0)
1229 	return GSS_S_BAD_SIG;
1230     p += 2;
1231     if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
1232 	return GSS_S_BAD_SIG;
1233     p += 2;
1234 
1235     if (memcmp (p, "\x10\x00", 2) == 0)
1236 	conf_state = 1;
1237     else if (memcmp (p, "\xff\xff", 2) == 0)
1238 	conf_state = 0;
1239     else
1240 	return GSS_S_BAD_SIG;
1241 
1242     p += 2;
1243     if (memcmp (p, "\xff\xff", 2) != 0)
1244 	return GSS_S_BAD_MIC;
1245     p = NULL;
1246 
1247     kret = arcfour_mic_key(context,
1248 			   key,
1249 			   p0 + 16, /* SGN_CKSUM */
1250 			   8,       /* SGN_CKSUM_LEN */
1251 			   k6_data,
1252 			   sizeof(k6_data));
1253     if (kret) {
1254 	*minor_status = kret;
1255 	return GSS_S_FAILURE;
1256     }
1257 
1258     {
1259 	EVP_CIPHER_CTX rc4_key;
1260 
1261 	EVP_CIPHER_CTX_init(&rc4_key);
1262 	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1263 	EVP_Cipher(&rc4_key, snd_seq, p0 + 8, 8); /* SND_SEQ */
1264 	EVP_CIPHER_CTX_cleanup(&rc4_key);
1265 
1266 	memset(k6_data, 0, sizeof(k6_data));
1267     }
1268 
1269     _gsskrb5_decode_be_om_uint32(snd_seq, &seq_number);
1270 
1271     if (ctx->more_flags & LOCAL) {
1272 	cmp = memcmp(&snd_seq[4], "\xff\xff\xff\xff", 4);
1273     } else {
1274 	cmp = memcmp(&snd_seq[4], "\x00\x00\x00\x00", 4);
1275     }
1276     if (cmp != 0) {
1277 	*minor_status = 0;
1278 	return GSS_S_BAD_MIC;
1279     }
1280 
1281     if (ctx->more_flags & LOCAL) {
1282 	cmp = memcmp(&snd_seq[4], "\xff\xff\xff\xff", 4);
1283     } else {
1284 	cmp = memcmp(&snd_seq[4], "\x00\x00\x00\x00", 4);
1285     }
1286     if (cmp != 0) {
1287 	*minor_status = 0;
1288 	return GSS_S_BAD_MIC;
1289     }
1290 
1291     /* keyblock */
1292     Klocal.keytype = key->keytype;
1293     Klocal.keyvalue.data = Klocaldata;
1294     Klocal.keyvalue.length = sizeof(Klocaldata);
1295 
1296     for (i = 0; i < 16; i++) {
1297 	Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
1298     }
1299 
1300     kret = arcfour_mic_key(context,
1301 			   &Klocal,
1302 			   snd_seq,
1303 			   4,
1304 			   k6_data, sizeof(k6_data));
1305     memset(Klocaldata, 0, sizeof(Klocaldata));
1306     if (kret) {
1307 	*minor_status = kret;
1308 	return GSS_S_FAILURE;
1309     }
1310 
1311     if (conf_state == 1) {
1312 	EVP_CIPHER_CTX rc4_key;
1313 
1314 	EVP_CIPHER_CTX_init(&rc4_key);
1315 	EVP_CipherInit_ex(&rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1);
1316 
1317 	/* Confounder */
1318 	EVP_Cipher(&rc4_key, Confounder, p0 + 24, 8);
1319 
1320 	/* Data */
1321 	for (i = 0; i < iov_count; i++) {
1322 	    switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
1323 	    case GSS_IOV_BUFFER_TYPE_DATA:
1324 		break;
1325 	    default:
1326 		continue;
1327 	    }
1328 
1329 	    EVP_Cipher(&rc4_key, iov[i].buffer.value,
1330 		       iov[i].buffer.value, iov[i].buffer.length);
1331 	}
1332 
1333 	/* Padding */
1334 	if (padding) {
1335 	    EVP_Cipher(&rc4_key, padding->buffer.value,
1336 		       padding->buffer.value, padding->buffer.length);
1337 	}
1338 
1339 	EVP_CIPHER_CTX_cleanup(&rc4_key);
1340     } else {
1341 	/* Confounder */
1342 	memcpy(Confounder, p0 + 24, 8);
1343     }
1344     memset(k6_data, 0, sizeof(k6_data));
1345 
1346     /* Prepare the buffer for signing */
1347     kret = arcfour_mic_cksum_iov(context,
1348 				 key, KRB5_KU_USAGE_SEAL,
1349 				 cksum_data, sizeof(cksum_data),
1350 				 p0, 8,
1351 				 Confounder, sizeof(Confounder),
1352 				 iov, iov_count,
1353 				 padding);
1354     if (kret) {
1355 	*minor_status = kret;
1356 	return GSS_S_FAILURE;
1357     }
1358 
1359     cmp = memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM */
1360     if (cmp != 0) {
1361 	*minor_status = 0;
1362 	return GSS_S_BAD_MIC;
1363     }
1364 
1365     if (padding) {
1366 	size_t plen;
1367 
1368 	ret = _gssapi_verify_pad(&padding->buffer, 1, &plen);
1369 	if (ret) {
1370 	    *minor_status = 0;
1371 	    return ret;
1372 	}
1373     }
1374 
1375     HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
1376     ret = _gssapi_msg_order_check(ctx->order, seq_number);
1377     HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);
1378     if (ret != 0) {
1379 	return ret;
1380     }
1381 
1382     if (pconf_state) {
1383 	*pconf_state = conf_state;
1384     }
1385 
1386     *minor_status = 0;
1387     return GSS_S_COMPLETE;
1388 }
1389