/* $NetBSD: arcfour.c,v 1.5 2023/06/01 20:40:18 christos Exp $ */

/*
 * Copyright (c) 2003 - 2006 Kungliga Tekniska Högskolan
 * (Royal Institute of Technology, Stockholm, Sweden).
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the Institute nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include "gsskrb5_locl.h"

/*
 * Implements draft-brezak-win2k-krb-rc4-hmac-04.txt
 *
 * The arcfour messages have the following formats:
 *
 * MIC token
 *      TOK_ID[2] = 01 01
 *      SGN_ALG[2] = 11 00
 *      Filler[4]
 *      SND_SEQ[8]
 *      SGN_CKSUM[8]
 *
 * WRAP token
 *      TOK_ID[2] = 02 01
 *      SGN_ALG[2]
 *      SEAL_ALG[2]
 *      Filler[2]
 *      SND_SEQ[8]
 *      SGN_CKSUM[8]
 *      Confounder[8]
 */

/*
 * A DCE-style WRAP token has a fixed-size header: the OID and length
 * framing around the WRAP header add up to
 * GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE +
 * GSS_ARCFOUR_WRAP_TOKEN_SIZE bytes (a total of 45 bytes of overhead;
 * remember the 2 bytes from APPL [0] SEQ).
 */
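/*
 * Illustrative note (not used by the code below): with the values
 * defined right after this comment, the fixed DCE-style overhead is
 *
 *     GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE (13)
 *   + GSS_ARCFOUR_WRAP_TOKEN_SIZE                (32)
 *   = 45 bytes
 *
 * of framing per DCE-style WRAP token, independent of the payload size.
 */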
#define GSS_ARCFOUR_WRAP_TOKEN_SIZE 32
#define GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE 13


static krb5_error_code
arcfour_mic_key(krb5_context context, krb5_keyblock *key,
                const void *cksum_data, size_t cksum_size,
                void *key6_data, size_t key6_size)
{
    krb5_error_code ret;

    Checksum cksum_k5;
    krb5_keyblock key5;
    char k5_data[16];

    Checksum cksum_k6;

    char T[4];

    memset(T, 0, 4);
    cksum_k5.checksum.data = k5_data;
    cksum_k5.checksum.length = sizeof(k5_data);

    if (key->keytype == KRB5_ENCTYPE_ARCFOUR_HMAC_MD5_56) {
        char L40[14] = "fortybits";

        memcpy(L40 + 10, T, sizeof(T));
        ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
                        L40, 14, 0, key, &cksum_k5);
        memset(&k5_data[7], 0xAB, 9);
    } else {
        ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
                        T, 4, 0, key, &cksum_k5);
    }
    if (ret)
        return ret;

    key5.keytype = KRB5_ENCTYPE_ARCFOUR_HMAC_MD5;
    key5.keyvalue = cksum_k5.checksum;

    cksum_k6.checksum.data = key6_data;
    cksum_k6.checksum.length = key6_size;

    return krb5_hmac(context, CKSUMTYPE_RSA_MD5,
                     cksum_data, cksum_size, 0, &key5, &cksum_k6);
}
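/*
 * Summary of the derivation arcfour_mic_key() performs, per
 * draft-brezak-win2k-krb-rc4-hmac (RFC 4757); this comment is
 * descriptive only:
 *
 *   K5 = HMAC-MD5(key, T)            where T is four zero bytes
 *        (for the 56-bit export enctype the HMAC input is
 *         "fortybits\0" || T and bytes 7..15 of K5 are overwritten
 *         with 0xAB)
 *   K6 = HMAC-MD5(K5, cksum_data)
 *
 * The callers below pass either the 8-byte SGN_CKSUM (to derive the
 * key that encrypts SND_SEQ) or the first 4 bytes of SND_SEQ together
 * with the 0xF0-XORed key Klocal (to derive the key that encrypts the
 * Confounder and payload). K6 is then used directly as an RC4 key.
 */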
static krb5_error_code
arcfour_mic_cksum_iov(krb5_context context,
                      krb5_keyblock *key, unsigned usage,
                      u_char *sgn_cksum, size_t sgn_cksum_sz,
                      const u_char *v1, size_t l1,
                      const void *v2, size_t l2,
                      const gss_iov_buffer_desc *iov,
                      int iov_count,
                      const gss_iov_buffer_desc *padding)
{
    Checksum CKSUM;
    u_char *ptr;
    size_t len;
    size_t ofs = 0;
    int i;
    krb5_crypto crypto;
    krb5_error_code ret;

    assert(sgn_cksum_sz == 8);

    len = l1 + l2;

    for (i = 0; i < iov_count; i++) {
        switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
        case GSS_IOV_BUFFER_TYPE_DATA:
        case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
            break;
        default:
            continue;
        }

        len += iov[i].buffer.length;
    }

    if (padding) {
        len += padding->buffer.length;
    }

    ptr = malloc(len);
    if (ptr == NULL)
        return ENOMEM;

    memcpy(ptr + ofs, v1, l1);
    ofs += l1;
    memcpy(ptr + ofs, v2, l2);
    ofs += l2;

    for (i = 0; i < iov_count; i++) {
        switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
        case GSS_IOV_BUFFER_TYPE_DATA:
        case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
            break;
        default:
            continue;
        }

        memcpy(ptr + ofs,
               iov[i].buffer.value,
               iov[i].buffer.length);
        ofs += iov[i].buffer.length;
    }

    if (padding) {
        memcpy(ptr + ofs,
               padding->buffer.value,
               padding->buffer.length);
        ofs += padding->buffer.length;
    }

    ret = krb5_crypto_init(context, key, 0, &crypto);
    if (ret) {
        free(ptr);
        return ret;
    }

    ret = krb5_create_checksum(context,
                               crypto,
                               usage,
                               0,
                               ptr, len,
                               &CKSUM);
    memset(ptr, 0, len);
    free(ptr);
    if (ret == 0) {
        memcpy(sgn_cksum, CKSUM.checksum.data, sgn_cksum_sz);
        free_Checksum(&CKSUM);
    }
    krb5_crypto_destroy(context, crypto);

    return ret;
}

static krb5_error_code
arcfour_mic_cksum(krb5_context context,
                  krb5_keyblock *key, unsigned usage,
                  u_char *sgn_cksum, size_t sgn_cksum_sz,
                  const u_char *v1, size_t l1,
                  const void *v2, size_t l2,
                  const void *v3, size_t l3)
{
    gss_iov_buffer_desc iov;

    iov.type = GSS_IOV_BUFFER_TYPE_SIGN_ONLY;
    iov.buffer.value = rk_UNCONST(v3);
    iov.buffer.length = l3;

    return arcfour_mic_cksum_iov(context, key, usage,
                                 sgn_cksum, sgn_cksum_sz,
                                 v1, l1, v2, l2,
                                 &iov, 1, NULL);
}


OM_uint32
_gssapi_get_mic_arcfour(OM_uint32 * minor_status,
                        const gsskrb5_ctx context_handle,
                        krb5_context context,
                        gss_qop_t qop_req,
                        const gss_buffer_t message_buffer,
                        gss_buffer_t message_token,
                        krb5_keyblock *key)
{
    krb5_error_code ret;
    int32_t seq_number;
    size_t len, total_len;
    u_char k6_data[16], *p0, *p;
    EVP_CIPHER_CTX *rc4_key;

    _gsskrb5_encap_length (22, &len, &total_len, GSS_KRB5_MECHANISM);

    message_token->length = total_len;
    message_token->value = malloc (total_len);
    if (message_token->value == NULL) {
        *minor_status = ENOMEM;
        return GSS_S_FAILURE;
    }

    p0 = _gssapi_make_mech_header(message_token->value,
                                  len,
                                  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x01; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG */
    *p++ = 0x00;
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;
    *p++ = 0xff;
    *p++ = 0xff;

    p = NULL;

    ret = arcfour_mic_cksum(context,
                            key, KRB5_KU_USAGE_SIGN,
                            p0 + 16, 8, /* SGN_CKSUM */
                            p0, 8, /* TOK_ID, SGN_ALG, Filler */
                            message_buffer->value, message_buffer->length,
                            NULL, 0);
    if (ret) {
        _gsskrb5_release_buffer(minor_status, message_token);
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    ret = arcfour_mic_key(context, key,
                          p0 + 16, 8, /* SGN_CKSUM */
                          k6_data, sizeof(k6_data));
    if (ret) {
        _gsskrb5_release_buffer(minor_status, message_token);
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber (context,
                                     context_handle->auth_context,
                                     &seq_number);
    p = p0 + 8; /* SND_SEQ */
    _gsskrb5_encode_be_om_uint32(seq_number, p);

    krb5_auth_con_setlocalseqnumber (context,
                                     context_handle->auth_context,
                                     ++seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);

    memset (p + 4, (context_handle->more_flags & LOCAL) ? 0 : 0xff, 4);

#if OPENSSL_VERSION_NUMBER < 0x10100000UL
    EVP_CIPHER_CTX rc4_keys;
    rc4_key = &rc4_keys;
    EVP_CIPHER_CTX_init(rc4_key);
#else
    rc4_key = EVP_CIPHER_CTX_new();
#endif
    if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
        *minor_status = EINVAL;
        return GSS_S_FAILURE;
    }

    EVP_Cipher(rc4_key, p, p, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
    EVP_CIPHER_CTX_cleanup(rc4_key);
#else
    EVP_CIPHER_CTX_free(rc4_key);
#endif

    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
OM_uint32
_gssapi_verify_mic_arcfour(OM_uint32 * minor_status,
                           const gsskrb5_ctx context_handle,
                           krb5_context context,
                           const gss_buffer_t message_buffer,
                           const gss_buffer_t token_buffer,
                           gss_qop_t * qop_state,
                           krb5_keyblock *key,
                           const char *type)
{
    krb5_error_code ret;
    uint32_t seq_number;
    OM_uint32 omret;
    u_char SND_SEQ[8], cksum_data[8], *p;
    char k6_data[16];
    int cmp;

    if (qop_state)
        *qop_state = 0;

    p = token_buffer->value;
    omret = _gsskrb5_verify_header (&p,
                                    token_buffer->length,
                                    type,
                                    GSS_KRB5_MECHANISM);
    if (omret)
        return omret;

    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
        return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp (p, "\xff\xff\xff\xff", 4) != 0)
        return GSS_S_BAD_MIC;
    p += 4;

    ret = arcfour_mic_cksum(context,
                            key, KRB5_KU_USAGE_SIGN,
                            cksum_data, sizeof(cksum_data),
                            p - 8, 8,
                            message_buffer->value, message_buffer->length,
                            NULL, 0);
    if (ret) {
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    ret = arcfour_mic_key(context, key,
                          cksum_data, sizeof(cksum_data),
                          k6_data, sizeof(k6_data));
    if (ret) {
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    cmp = ct_memcmp(cksum_data, p + 8, 8);
    if (cmp) {
        *minor_status = 0;
        return GSS_S_BAD_MIC;
    }

    {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif

        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, (void *)k6_data, NULL,
                               0)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
        EVP_Cipher(rc4_key, SND_SEQ, p, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif

        memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);

    if (context_handle->more_flags & LOCAL)
        cmp = memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4);
    else
        cmp = memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4);

    memset_s(SND_SEQ, sizeof(SND_SEQ), 0, sizeof(SND_SEQ));
    if (cmp != 0) {
        *minor_status = 0;
        return GSS_S_BAD_MIC;
    }

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    omret = _gssapi_msg_order_check(context_handle->order, seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
    if (omret)
        return omret;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
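/*
 * These MIC routines are not called directly by applications; they are
 * reached through the mechanism switch when an RC4-HMAC context is in
 * use. A minimal sketch of the generic GSS-API calls that end up here
 * (illustrative only; error handling omitted):
 *
 *     gss_buffer_desc msg = { 5, "hello" };
 *     gss_buffer_desc mic = GSS_C_EMPTY_BUFFER;
 *     OM_uint32 maj, min;
 *
 *     maj = gss_get_mic(&min, ctx, GSS_C_QOP_DEFAULT, &msg, &mic);
 *     ...
 *     maj = gss_verify_mic(&min, ctx, &msg, &mic, NULL);
 *     gss_release_buffer(&min, &mic);
 */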
OM_uint32
_gssapi_wrap_arcfour(OM_uint32 * minor_status,
                     const gsskrb5_ctx context_handle,
                     krb5_context context,
                     int conf_req_flag,
                     gss_qop_t qop_req,
                     const gss_buffer_t input_message_buffer,
                     int * conf_state,
                     gss_buffer_t output_message_buffer,
                     krb5_keyblock *key)
{
    u_char Klocaldata[16], k6_data[16], *p, *p0;
    size_t len, total_len, datalen;
    krb5_keyblock Klocal;
    krb5_error_code ret;
    int32_t seq_number;

    if (conf_state)
        *conf_state = 0;

    datalen = input_message_buffer->length;

    if (IS_DCE_STYLE(context_handle)) {
        len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
        _gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
        total_len += datalen;
    } else {
        datalen += 1; /* padding */
        len = datalen + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
        _gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
    }

    output_message_buffer->length = total_len;
    output_message_buffer->value = malloc (total_len);
    if (output_message_buffer->value == NULL) {
        *minor_status = ENOMEM;
        return GSS_S_FAILURE;
    }

    p0 = _gssapi_make_mech_header(output_message_buffer->value,
                                  len,
                                  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x02; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG */
    *p++ = 0x00;
    if (conf_req_flag) {
        *p++ = 0x10; /* SEAL_ALG */
        *p++ = 0x00;
    } else {
        *p++ = 0xff; /* SEAL_ALG */
        *p++ = 0xff;
    }
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;

    p = NULL;

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber (context,
                                     context_handle->auth_context,
                                     &seq_number);

    _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);

    krb5_auth_con_setlocalseqnumber (context,
                                     context_handle->auth_context,
                                     ++seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);

    memset (p0 + 8 + 4,
            (context_handle->more_flags & LOCAL) ? 0 : 0xff,
            4);

    krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */

    /* p points to data */
    p = p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
    memcpy(p, input_message_buffer->value, input_message_buffer->length);

    if (!IS_DCE_STYLE(context_handle))
        p[input_message_buffer->length] = 1; /* padding */

    ret = arcfour_mic_cksum(context,
                            key, KRB5_KU_USAGE_SEAL,
                            p0 + 16, 8, /* SGN_CKSUM */
                            p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
                            p0 + 24, 8, /* Confounder */
                            p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
                            datalen);
    if (ret) {
        *minor_status = ret;
        _gsskrb5_release_buffer(minor_status, output_message_buffer);
        return GSS_S_FAILURE;
    }

    {
        int i;

        Klocal.keytype = key->keytype;
        Klocal.keyvalue.data = Klocaldata;
        Klocal.keyvalue.length = sizeof(Klocaldata);

        for (i = 0; i < 16; i++)
            Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    ret = arcfour_mic_key(context, &Klocal,
                          p0 + 8, 4, /* SND_SEQ */
                          k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (ret) {
        _gsskrb5_release_buffer(minor_status, output_message_buffer);
        *minor_status = ret;
        return GSS_S_FAILURE;
    }


    if (conf_req_flag) {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif
        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
        EVP_Cipher(rc4_key, p0 + 24, p0 + 24, 8 + datalen);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    ret = arcfour_mic_key(context, key,
                          p0 + 16, 8, /* SGN_CKSUM */
                          k6_data, sizeof(k6_data));
    if (ret) {
        _gsskrb5_release_buffer(minor_status, output_message_buffer);
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif

        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
        EVP_Cipher(rc4_key, p0 + 8, p0 + 8 /* SND_SEQ */, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif
        memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    if (conf_state)
        *conf_state = conf_req_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
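/*
 * Byte offsets within the WRAP token built above, relative to p0 (the
 * first byte after the mech header); kept here purely as a reading aid
 * for the wrap/unwrap code:
 *
 *   p0 +  0: TOK_ID     (02 01)
 *   p0 +  2: SGN_ALG    (11 00)
 *   p0 +  4: SEAL_ALG   (10 00 sealed, ff ff unsealed)
 *   p0 +  6: Filler     (ff ff)
 *   p0 +  8: SND_SEQ    (8 bytes, RC4-encrypted with the SGN_CKSUM-derived key)
 *   p0 + 16: SGN_CKSUM  (8 bytes of HMAC-MD5 output)
 *   p0 + 24: Confounder (8 bytes)
 *   p0 + 32: payload    (+ 1 byte of padding unless DCE-style)
 */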
OM_uint32 _gssapi_unwrap_arcfour(OM_uint32 *minor_status,
                                 const gsskrb5_ctx context_handle,
                                 krb5_context context,
                                 const gss_buffer_t input_message_buffer,
                                 gss_buffer_t output_message_buffer,
                                 int *conf_state,
                                 gss_qop_t *qop_state,
                                 krb5_keyblock *key)
{
    u_char Klocaldata[16];
    krb5_keyblock Klocal;
    krb5_error_code ret;
    uint32_t seq_number;
    size_t datalen;
    OM_uint32 omret;
    u_char k6_data[16], SND_SEQ[8], Confounder[8];
    u_char cksum_data[8];
    u_char *p, *p0;
    int cmp;
    int conf_flag;
    size_t padlen = 0, len;

    if (conf_state)
        *conf_state = 0;
    if (qop_state)
        *qop_state = 0;

    p0 = input_message_buffer->value;

    if (IS_DCE_STYLE(context_handle)) {
        len = GSS_ARCFOUR_WRAP_TOKEN_SIZE +
            GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE;
        if (input_message_buffer->length < len)
            return GSS_S_BAD_MECH;
    } else {
        len = input_message_buffer->length;
    }

    omret = _gssapi_verify_mech_header(&p0,
                                       len,
                                       GSS_KRB5_MECHANISM);
    if (omret)
        return omret;

    /* length of mech header */
    len = (p0 - (u_char *)input_message_buffer->value) +
        GSS_ARCFOUR_WRAP_TOKEN_SIZE;

    if (len > input_message_buffer->length)
        return GSS_S_BAD_MECH;

    /* length of data */
    datalen = input_message_buffer->length - len;

    p = p0;

    if (memcmp(p, "\x02\x01", 2) != 0)
        return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
        return GSS_S_BAD_SIG;
    p += 2;

    if (memcmp (p, "\x10\x00", 2) == 0)
        conf_flag = 1;
    else if (memcmp (p, "\xff\xff", 2) == 0)
        conf_flag = 0;
    else
        return GSS_S_BAD_SIG;

    p += 2;
    if (memcmp (p, "\xff\xff", 2) != 0)
        return GSS_S_BAD_MIC;
    p = NULL;

    ret = arcfour_mic_key(context, key,
                          p0 + 16, 8, /* SGN_CKSUM */
                          k6_data, sizeof(k6_data));
    if (ret) {
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif

        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
        EVP_Cipher(rc4_key, SND_SEQ, p0 + 8, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif
        memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);

    if (context_handle->more_flags & LOCAL)
        cmp = memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4);
    else
        cmp = memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4);

    if (cmp != 0) {
        *minor_status = 0;
        return GSS_S_BAD_MIC;
    }

    {
        int i;

        Klocal.keytype = key->keytype;
        Klocal.keyvalue.data = Klocaldata;
        Klocal.keyvalue.length = sizeof(Klocaldata);

        for (i = 0; i < 16; i++)
            Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    ret = arcfour_mic_key(context, &Klocal,
                          SND_SEQ, 4,
                          k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (ret) {
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    output_message_buffer->value = malloc(datalen);
    if (output_message_buffer->value == NULL) {
        *minor_status = ENOMEM;
        return GSS_S_FAILURE;
    }
    output_message_buffer->length = datalen;

    if (conf_flag) {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif
        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
        EVP_Cipher(rc4_key, Confounder, p0 + 24, 8);
        EVP_Cipher(rc4_key, output_message_buffer->value,
                   p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE, datalen);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif
    } else {
        memcpy(Confounder, p0 + 24, 8); /* Confounder */
        memcpy(output_message_buffer->value,
               p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
               datalen);
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    if (!IS_DCE_STYLE(context_handle)) {
        ret = _gssapi_verify_pad(output_message_buffer, datalen, &padlen);
        if (ret) {
            _gsskrb5_release_buffer(minor_status, output_message_buffer);
            *minor_status = 0;
            return ret;
        }
        output_message_buffer->length -= padlen;
    }

    ret = arcfour_mic_cksum(context,
                            key, KRB5_KU_USAGE_SEAL,
                            cksum_data, sizeof(cksum_data),
                            p0, 8,
                            Confounder, sizeof(Confounder),
                            output_message_buffer->value,
                            output_message_buffer->length + padlen);
    if (ret) {
        _gsskrb5_release_buffer(minor_status, output_message_buffer);
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    cmp = ct_memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM */
    if (cmp) {
        _gsskrb5_release_buffer(minor_status, output_message_buffer);
        *minor_status = 0;
        return GSS_S_BAD_MIC;
    }

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    omret = _gssapi_msg_order_check(context_handle->order, seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
    if (omret)
        return omret;

    if (conf_state)
        *conf_state = conf_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}
static OM_uint32
max_wrap_length_arcfour(const gsskrb5_ctx ctx,
                        krb5_crypto crypto,
                        size_t input_length,
                        OM_uint32 *max_input_size)
{
    /*
     * If GSS_C_DCE_STYLE is in use we only need to encapsulate the
     * WRAP token; since that token has a fixed size, the overhead is
     * constant and independent of the input length.
     */
    if (IS_DCE_STYLE(ctx)) {
        size_t len, total_len;

        len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
        _gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);

        if (input_length < len)
            *max_input_size = 0;
        else
            *max_input_size = input_length - len;

    } else {
        size_t extrasize = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
        size_t blocksize = 8;
        size_t len, total_len;

        len = 8 + input_length + blocksize + extrasize;

        _gsskrb5_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);

        total_len -= input_length; /* token length */
        if (total_len < input_length) {
            *max_input_size = (input_length - total_len);
            (*max_input_size) &= (~(OM_uint32)(blocksize - 1));
        } else {
            *max_input_size = 0;
        }
    }

    return GSS_S_COMPLETE;
}

OM_uint32
_gssapi_wrap_size_arcfour(OM_uint32 *minor_status,
                          const gsskrb5_ctx ctx,
                          krb5_context context,
                          int conf_req_flag,
                          gss_qop_t qop_req,
                          OM_uint32 req_output_size,
                          OM_uint32 *max_input_size,
                          krb5_keyblock *key)
{
    krb5_error_code ret;
    krb5_crypto crypto;

    ret = krb5_crypto_init(context, key, 0, &crypto);
    if (ret != 0) {
        *minor_status = ret;
        return GSS_S_FAILURE;
    }

    ret = max_wrap_length_arcfour(ctx, crypto,
                                  req_output_size, max_input_size);
    if (ret != 0) {
        *minor_status = ret;
        krb5_crypto_destroy(context, crypto);
        return GSS_S_FAILURE;
    }

    krb5_crypto_destroy(context, crypto);

    return GSS_S_COMPLETE;
}
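/*
 * Reading aid for the non-DCE branch above (descriptive only): after
 * the call to _gsskrb5_encap_length(), total_len - input_length is the
 * per-message overhead (mech header + WRAP token + padding estimate),
 * so the largest usable input for a req_output_size-sized output is
 *
 *     max_input_size = (req_output_size - overhead) & ~(blocksize - 1)
 *
 * i.e. the result is rounded down to the 8-byte granularity used by
 * the estimate.
 */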
OM_uint32
_gssapi_wrap_iov_length_arcfour(OM_uint32 *minor_status,
                                gsskrb5_ctx ctx,
                                krb5_context context,
                                int conf_req_flag,
                                gss_qop_t qop_req,
                                int *conf_state,
                                gss_iov_buffer_desc *iov,
                                int iov_count)
{
    OM_uint32 major_status;
    size_t data_len = 0;
    int i;
    gss_iov_buffer_desc *header = NULL;
    gss_iov_buffer_desc *padding = NULL;
    gss_iov_buffer_desc *trailer = NULL;

    *minor_status = 0;

    for (i = 0; i < iov_count; i++) {
        switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
        case GSS_IOV_BUFFER_TYPE_EMPTY:
            break;
        case GSS_IOV_BUFFER_TYPE_DATA:
            data_len += iov[i].buffer.length;
            break;
        case GSS_IOV_BUFFER_TYPE_HEADER:
            if (header != NULL) {
                *minor_status = EINVAL;
                return GSS_S_FAILURE;
            }
            header = &iov[i];
            break;
        case GSS_IOV_BUFFER_TYPE_TRAILER:
            if (trailer != NULL) {
                *minor_status = EINVAL;
                return GSS_S_FAILURE;
            }
            trailer = &iov[i];
            break;
        case GSS_IOV_BUFFER_TYPE_PADDING:
            if (padding != NULL) {
                *minor_status = EINVAL;
                return GSS_S_FAILURE;
            }
            padding = &iov[i];
            break;
        case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
            break;
        default:
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
    }

    major_status = _gk_verify_buffers(minor_status, ctx, header, padding, trailer);
    if (major_status != GSS_S_COMPLETE) {
        return major_status;
    }

    if (IS_DCE_STYLE(ctx)) {
        size_t len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
        size_t total_len;
        _gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
        header->buffer.length = total_len;
    } else {
        size_t len;
        size_t total_len;
        if (padding) {
            data_len += 1; /* padding */
        }
        len = data_len + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
        _gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
        header->buffer.length = total_len - data_len;
    }

    if (trailer) {
        trailer->buffer.length = 0;
    }

    if (padding) {
        padding->buffer.length = 1;
    }

    return GSS_S_COMPLETE;
}
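/*
 * Summary of what the length query above reports back to the caller
 * (descriptive only): the HEADER buffer receives the mech header plus
 * the 32-byte WRAP token (for DCE-style, the full fixed-size header),
 * the TRAILER buffer is always 0 bytes, and the PADDING buffer, if
 * supplied, is always exactly 1 byte for this mechanism.
 */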
OM_uint32
_gssapi_wrap_iov_arcfour(OM_uint32 *minor_status,
                         gsskrb5_ctx ctx,
                         krb5_context context,
                         int conf_req_flag,
                         int *conf_state,
                         gss_iov_buffer_desc *iov,
                         int iov_count,
                         krb5_keyblock *key)
{
    OM_uint32 major_status, junk;
    gss_iov_buffer_desc *header, *padding, *trailer;
    krb5_error_code kret;
    int32_t seq_number;
    u_char Klocaldata[16], k6_data[16], *p, *p0;
    size_t make_len = 0;
    size_t header_len = 0;
    size_t data_len = 0;
    krb5_keyblock Klocal;
    int i;

    header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
    padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
    trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);

    major_status = _gk_verify_buffers(minor_status, ctx, header, padding, trailer);
    if (major_status != GSS_S_COMPLETE) {
        return major_status;
    }

    for (i = 0; i < iov_count; i++) {
        switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
        case GSS_IOV_BUFFER_TYPE_DATA:
            break;
        default:
            continue;
        }

        data_len += iov[i].buffer.length;
    }

    if (padding) {
        data_len += 1;
    }

    if (IS_DCE_STYLE(ctx)) {
        size_t unwrapped_len;
        unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
        _gssapi_encap_length(unwrapped_len,
                             &make_len,
                             &header_len,
                             GSS_KRB5_MECHANISM);
    } else {
        size_t unwrapped_len;
        unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE + data_len;
        _gssapi_encap_length(unwrapped_len,
                             &make_len,
                             &header_len,
                             GSS_KRB5_MECHANISM);
        header_len -= data_len;
    }

    if (GSS_IOV_BUFFER_FLAGS(header->type) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE) {
        major_status = _gk_allocate_buffer(minor_status, header,
                                           header_len);
        if (major_status != GSS_S_COMPLETE)
            goto failure;
    } else if (header->buffer.length < header_len) {
        *minor_status = KRB5_BAD_MSIZE;
        major_status = GSS_S_FAILURE;
        goto failure;
    } else {
        header->buffer.length = header_len;
    }

    if (padding) {
        if (GSS_IOV_BUFFER_FLAGS(padding->type) & GSS_IOV_BUFFER_TYPE_FLAG_ALLOCATE) {
            major_status = _gk_allocate_buffer(minor_status, padding, 1);
            if (major_status != GSS_S_COMPLETE)
                goto failure;
        } else if (padding->buffer.length < 1) {
            *minor_status = KRB5_BAD_MSIZE;
            major_status = GSS_S_FAILURE;
            goto failure;
        } else {
            padding->buffer.length = 1;
        }
        memset(padding->buffer.value, 1, 1);
    }

    if (trailer) {
        trailer->buffer.length = 0;
        trailer->buffer.value = NULL;
    }

    p0 = _gssapi_make_mech_header(header->buffer.value,
                                  make_len,
                                  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x02; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG */
    *p++ = 0x00;
    if (conf_req_flag) {
        *p++ = 0x10; /* SEAL_ALG */
        *p++ = 0x00;
    } else {
        *p++ = 0xff; /* SEAL_ALG */
        *p++ = 0xff;
    }
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;

    p = NULL;

    HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber(context,
                                    ctx->auth_context,
                                    &seq_number);
    _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);

    krb5_auth_con_setlocalseqnumber(context,
                                    ctx->auth_context,
                                    ++seq_number);
    HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);

    memset(p0 + 8 + 4,
           (ctx->more_flags & LOCAL) ? 0 : 0xff,
           4);

    krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */

    /* Sign Data */
    kret = arcfour_mic_cksum_iov(context,
                                 key, KRB5_KU_USAGE_SEAL,
                                 p0 + 16, 8, /* SGN_CKSUM */
                                 p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
                                 p0 + 24, 8, /* Confounder */
                                 iov, iov_count, /* Data + SignOnly */
                                 padding); /* padding */
    if (kret) {
        *minor_status = kret;
        major_status = GSS_S_FAILURE;
        goto failure;
    }

    Klocal.keytype = key->keytype;
    Klocal.keyvalue.data = Klocaldata;
    Klocal.keyvalue.length = sizeof(Klocaldata);

    for (i = 0; i < 16; i++) {
        Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    kret = arcfour_mic_key(context, &Klocal,
                           p0 + 8, 4, /* SND_SEQ */
                           k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (kret) {
        *minor_status = kret;
        major_status = GSS_S_FAILURE;
        goto failure;
    }

    if (conf_req_flag) {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif
        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }

        /* Confounder */
        EVP_Cipher(rc4_key, p0 + 24, p0 + 24, 8);

        /* Seal Data */
        for (i = 0; i < iov_count; i++) {
            switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
            case GSS_IOV_BUFFER_TYPE_DATA:
                break;
            default:
                continue;
            }

            EVP_Cipher(rc4_key, iov[i].buffer.value,
                       iov[i].buffer.value, iov[i].buffer.length);
        }

        /* Padding */
        if (padding) {
            EVP_Cipher(rc4_key, padding->buffer.value,
                       padding->buffer.value, padding->buffer.length);
        }

#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    kret = arcfour_mic_key(context, key,
                           p0 + 16, 8, /* SGN_CKSUM */
                           k6_data, sizeof(k6_data));
    if (kret) {
        *minor_status = kret;
        major_status = GSS_S_FAILURE;
        return major_status;
    }

    {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif
        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
        EVP_Cipher(rc4_key, p0 + 8, p0 + 8, 8); /* SND_SEQ */
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif

        memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    if (conf_state)
        *conf_state = conf_req_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;

failure:

    gss_release_iov_buffer(&junk, iov, iov_count);

    return major_status;
}
OM_uint32
_gssapi_unwrap_iov_arcfour(OM_uint32 *minor_status,
                           gsskrb5_ctx ctx,
                           krb5_context context,
                           int *pconf_state,
                           gss_qop_t *pqop_state,
                           gss_iov_buffer_desc *iov,
                           int iov_count,
                           krb5_keyblock *key)
{
    OM_uint32 major_status;
    gss_iov_buffer_desc *header, *padding, *trailer;
    krb5_keyblock Klocal;
    uint8_t Klocaldata[16];
    uint8_t k6_data[16], snd_seq[8], Confounder[8];
    uint8_t cksum_data[8];
    uint8_t *_p = NULL;
    const uint8_t *p, *p0;
    size_t verify_len = 0;
    uint32_t seq_number;
    size_t hlen = 0;
    int conf_state;
    int cmp;
    size_t i;
    krb5_error_code kret;
    OM_uint32 ret;

    if (pconf_state != NULL) {
        *pconf_state = 0;
    }
    if (pqop_state != NULL) {
        *pqop_state = 0;
    }

    header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
    padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
    trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);

    /* Check if the packet is correct */
    major_status = _gk_verify_buffers(minor_status,
                                      ctx,
                                      header,
                                      padding,
                                      trailer);
    if (major_status != GSS_S_COMPLETE) {
        return major_status;
    }

    if (padding != NULL && padding->buffer.length != 1) {
        *minor_status = EINVAL;
        return GSS_S_FAILURE;
    }

    if (IS_DCE_STYLE(ctx)) {
        verify_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE +
            GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE;
        if (header->buffer.length > verify_len) {
            return GSS_S_BAD_MECH;
        }
    } else {
        verify_len = header->buffer.length;
    }
    _p = header->buffer.value;

    ret = _gssapi_verify_mech_header(&_p,
                                     verify_len,
                                     GSS_KRB5_MECHANISM);
    if (ret) {
        return ret;
    }
    p0 = _p;

    /* length of mech header */
    hlen = (p0 - (uint8_t *)header->buffer.value);
    hlen += GSS_ARCFOUR_WRAP_TOKEN_SIZE;

    if (hlen > header->buffer.length) {
        return GSS_S_BAD_MECH;
    }

    p = p0;

    if (memcmp(p, "\x02\x01", 2) != 0)
        return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
        return GSS_S_BAD_SIG;
    p += 2;

    if (memcmp (p, "\x10\x00", 2) == 0)
        conf_state = 1;
    else if (memcmp (p, "\xff\xff", 2) == 0)
        conf_state = 0;
    else
        return GSS_S_BAD_SIG;

    p += 2;
    if (memcmp (p, "\xff\xff", 2) != 0)
        return GSS_S_BAD_MIC;
    p = NULL;
    kret = arcfour_mic_key(context,
                           key,
                           p0 + 16, /* SGN_CKSUM */
                           8,       /* SGN_CKSUM_LEN */
                           k6_data,
                           sizeof(k6_data));
    if (kret) {
        *minor_status = kret;
        return GSS_S_FAILURE;
    }

    {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif

        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }
        EVP_Cipher(rc4_key, snd_seq, p0 + 8, 8); /* SND_SEQ */
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif

        memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(snd_seq, &seq_number);

    if (ctx->more_flags & LOCAL) {
        cmp = memcmp(&snd_seq[4], "\xff\xff\xff\xff", 4);
    } else {
        cmp = memcmp(&snd_seq[4], "\x00\x00\x00\x00", 4);
    }
    if (cmp != 0) {
        *minor_status = 0;
        return GSS_S_BAD_MIC;
    }

    /* keyblock */
    Klocal.keytype = key->keytype;
    Klocal.keyvalue.data = Klocaldata;
    Klocal.keyvalue.length = sizeof(Klocaldata);

    for (i = 0; i < 16; i++) {
        Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }

    kret = arcfour_mic_key(context,
                           &Klocal,
                           snd_seq,
                           4,
                           k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (kret) {
        *minor_status = kret;
        return GSS_S_FAILURE;
    }

    if (conf_state == 1) {
        EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX rc4_keys;
        rc4_key = &rc4_keys;
        EVP_CIPHER_CTX_init(rc4_key);
#else
        rc4_key = EVP_CIPHER_CTX_new();
#endif

        if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
            *minor_status = EINVAL;
            return GSS_S_FAILURE;
        }

        /* Confounder */
        EVP_Cipher(rc4_key, Confounder, p0 + 24, 8);

        /* Data */
        for (i = 0; i < iov_count; i++) {
            switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
            case GSS_IOV_BUFFER_TYPE_DATA:
                break;
            default:
                continue;
            }

            EVP_Cipher(rc4_key, iov[i].buffer.value,
                       iov[i].buffer.value, iov[i].buffer.length);
        }

        /* Padding */
        if (padding) {
            EVP_Cipher(rc4_key, padding->buffer.value,
                       padding->buffer.value, padding->buffer.length);
        }

#if OPENSSL_VERSION_NUMBER < 0x10100000UL
        EVP_CIPHER_CTX_cleanup(rc4_key);
#else
        EVP_CIPHER_CTX_free(rc4_key);
#endif
    } else {
        /* Confounder */
        memcpy(Confounder, p0 + 24, 8);
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    /* Prepare the buffer for signing */
    kret = arcfour_mic_cksum_iov(context,
                                 key, KRB5_KU_USAGE_SEAL,
                                 cksum_data, sizeof(cksum_data),
                                 p0, 8,
                                 Confounder, sizeof(Confounder),
                                 iov, iov_count,
                                 padding);
    if (kret) {
        *minor_status = kret;
        return GSS_S_FAILURE;
    }

    cmp = ct_memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM */
    if (cmp != 0) {
        *minor_status = 0;
        return GSS_S_BAD_MIC;
    }

    if (padding) {
        size_t plen;

        ret = _gssapi_verify_pad(&padding->buffer, 1, &plen);
        if (ret) {
            *minor_status = 0;
            return ret;
        }
    }

    HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
    ret = _gssapi_msg_order_check(ctx->order, seq_number);
    HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);
    if (ret != 0) {
        return ret;
    }

    if (pconf_state) {
        *pconf_state = conf_state;
    }

    *minor_status = 0;
    return GSS_S_COMPLETE;
}