/*
 * Copyright (C) 2021 - This file is part of libecc project
 *
 * Authors:
 *     Ryad BENADJILA <ryadbenadjila@gmail.com>
 *     Arnaud EBALARD <arnaud.ebalard@ssi.gouv.fr>
 *
 * This software is licensed under a dual BSD and GPL v2 license.
 * See LICENSE file at the root folder of the project.
 */
#include "md4.h"

/* All the inner MD-4 operations */
static const u32 C1_MD4[13] = {
	0, 4, 8, 12, 0, 1, 2, 3, 3, 7, 11, 19, 0
};
static const u32 C2_MD4[13] = {
	0, 1, 2, 3, 0, 4, 8, 12, 3, 5, 9, 13, 0x5a827999
};
static const u32 C3_MD4[13] = {
	0, 2, 1, 3, 0, 8, 4, 12, 3, 9, 11, 15, 0x6ed9eba1
};

#define F_MD4(x, y, z)   (((x) & (y)) | ((~(x)) & (z)))
#define G_MD4(x, y, z)   (((x) & (y)) | ((x) & (z)) | ((y) & (z)))
#define H_MD4(x, y, z)   ((x) ^ (y) ^ (z))

/* MD4 core processing. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET static inline int md4_process(md4_context *ctx,
							const u8 data[MD4_BLOCK_SIZE])
{
	u32 A, B, C, D;
	u32 W[16];
	u32 idx;
	int ret;
	unsigned int i;

	MUST_HAVE((data != NULL), ret, err);
	MD4_HASH_CHECK_INITIALIZED(ctx, ret, err);

	/* Init our inner variables */
	A = ctx->md4_state[0];
	B = ctx->md4_state[1];
	C = ctx->md4_state[2];
	D = ctx->md4_state[3];

	/* Load data */
	for (i = 0; i < 16; i++) {
		GET_UINT32_LE(W[i], data, (4 * i));
	}
	/* Proceed with the compression */
	for (i = 0; i < 4; i++) {
		idx = (C1_MD4[i] + C1_MD4[4]);
		A = ROTL_MD4((A + F_MD4(B, C, D) + W[idx] + C1_MD4[12]), C1_MD4[8]);
		idx = (C1_MD4[i] + C1_MD4[5]);
		D = ROTL_MD4((D + F_MD4(A, B, C) + W[idx] + C1_MD4[12]), C1_MD4[9]);
		idx = (C1_MD4[i] + C1_MD4[6]);
		C = ROTL_MD4((C + F_MD4(D, A, B) + W[idx] + C1_MD4[12]), C1_MD4[10]);
		idx = (C1_MD4[i] + C1_MD4[7]);
		B = ROTL_MD4((B + F_MD4(C, D, A) + W[idx] + C1_MD4[12]), C1_MD4[11]);
	}
	for (i = 0; i < 4; i++) {
		idx = (C2_MD4[i] + C2_MD4[4]);
		A = ROTL_MD4((A + G_MD4(B, C, D) + W[idx] + C2_MD4[12]), C2_MD4[8]);
		idx = (C2_MD4[i] + C2_MD4[5]);
		D = ROTL_MD4((D + G_MD4(A, B, C) + W[idx] + C2_MD4[12]), C2_MD4[9]);
		idx = (C2_MD4[i] + C2_MD4[6]);
		C = ROTL_MD4((C + G_MD4(D, A, B) + W[idx] + C2_MD4[12]), C2_MD4[10]);
		idx = (C2_MD4[i] + C2_MD4[7]);
		B = ROTL_MD4((B + G_MD4(C, D, A) + W[idx] + C2_MD4[12]), C2_MD4[11]);
	}
	for (i = 0; i < 4; i++) {
		idx = (C3_MD4[i] + C3_MD4[4]);
		A = ROTL_MD4((A + H_MD4(B, C, D) + W[idx] + C3_MD4[12]), C3_MD4[8]);
		idx = (C3_MD4[i] + C3_MD4[5]);
		D = ROTL_MD4((D + H_MD4(A, B, C) + W[idx] + C3_MD4[12]), C3_MD4[9]);
		idx = (C3_MD4[i] + C3_MD4[6]);
		C = ROTL_MD4((C + H_MD4(D, A, B) + W[idx] + C3_MD4[12]), C3_MD4[10]);
		idx = (C3_MD4[i] + C3_MD4[7]);
		B = ROTL_MD4((B + H_MD4(C, D, A) + W[idx] + C3_MD4[12]), C3_MD4[11]);
	}

	/* Update state */
	ctx->md4_state[0] += A;
	ctx->md4_state[1] += B;
	ctx->md4_state[2] += C;
	ctx->md4_state[3] += D;

	ret = 0;

err:
	return ret;
}
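
/*
 * Informative note (added commentary, not from the original source): each
 * step above is an instance of the generic RFC 1320 round operation
 *
 *     a = (a + f(b, c, d) + X[k] + K) <<< s
 *
 * with f one of F_MD4/G_MD4/H_MD4. Each Cn_MD4 table packs, for its round,
 * the message word indices k (entries 0..3 and 4..7, combined as base +
 * offset), the left-rotation amounts s (entries 8..11) and the additive
 * constant K (entry 12, zero for round 1). For example, the very first
 * round-1 step reduces to
 *
 *     A = ROTL_MD4(A + F_MD4(B, C, D) + W[0], 3)
 *
 * which is "[ABCD 0 3]" in RFC 1320 notation.
 */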

/* Init hash function. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET int md4_init(md4_context *ctx)
{
	int ret;

	MUST_HAVE((ctx != NULL), ret, err);

	/* Sanity check on size */
	MUST_HAVE((MD4_DIGEST_SIZE <= MAX_DIGEST_SIZE), ret, err);

	ctx->md4_total = 0;
	ctx->md4_state[0] = 0x67452301;
	ctx->md4_state[1] = 0xEFCDAB89;
	ctx->md4_state[2] = 0x98BADCFE;
	ctx->md4_state[3] = 0x10325476;

	/* Tell that we are initialized */
	ctx->magic = MD4_HASH_MAGIC;

	ret = 0;

err:
	return ret;
}

/* Update hash function with input data. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET int md4_update(md4_context *ctx, const u8 *input, u32 ilen)
{
	const u8 *data_ptr = input;
	u32 remain_ilen = ilen;
	u16 fill;
	u8 left;
	int ret;

	MUST_HAVE((input != NULL) || (ilen == 0), ret, err);
	MD4_HASH_CHECK_INITIALIZED(ctx, ret, err);

	/* Nothing to process, return */
	if (ilen == 0) {
		ret = 0;
		goto err;
	}

	/* Get what's left in our local buffer */
	left = (ctx->md4_total & 0x3F);
	fill = (u16)(MD4_BLOCK_SIZE - left);

	ctx->md4_total += ilen;

	if ((left > 0) && (remain_ilen >= fill)) {
		/* Copy data at the end of the buffer */
		ret = local_memcpy(ctx->md4_buffer + left, data_ptr, fill); EG(ret, err);
		ret = md4_process(ctx, ctx->md4_buffer); EG(ret, err);
		data_ptr += fill;
		remain_ilen -= fill;
		left = 0;
	}

	while (remain_ilen >= MD4_BLOCK_SIZE) {
		ret = md4_process(ctx, data_ptr); EG(ret, err);
		data_ptr += MD4_BLOCK_SIZE;
		remain_ilen -= MD4_BLOCK_SIZE;
	}

	if (remain_ilen > 0) {
		ret = local_memcpy(ctx->md4_buffer + left, data_ptr, remain_ilen); EG(ret, err);
	}

	ret = 0;

err:
	return ret;
}
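
/*
 * Illustrative sketch (not part of the original file): how a caller can
 * stream a message through the init/update/final API in arbitrary chunks,
 * relying on md4_update to buffer partial blocks. The function name
 * example_md4_stream, the 5-byte chunk size and the MD4_EXAMPLES guard
 * are hypothetical and only serve to keep this sketch out of the normal
 * library build.
 */
#ifdef MD4_EXAMPLES
ATTRIBUTE_WARN_UNUSED_RET static int example_md4_stream(const u8 *msg, u32 len,
							u8 digest[MD4_DIGEST_SIZE])
{
	md4_context ctx;
	u32 off = 0;
	u32 chunk;
	int ret;

	ret = md4_init(&ctx); EG(ret, err);
	/*
	 * Feed the message in small (here 5-byte) chunks; md4_update
	 * accumulates partial blocks in the context buffer and only calls
	 * md4_process once a full 64-byte block is available.
	 */
	while (off < len) {
		chunk = ((len - off) < 5) ? (len - off) : 5;
		ret = md4_update(&ctx, msg + off, chunk); EG(ret, err);
		off += chunk;
	}
	/* Pad and produce the 16-byte digest */
	ret = md4_final(&ctx, digest);

err:
	return ret;
}
#endif /* MD4_EXAMPLES */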

/* Finalize. Returns 0 on success, -1 on error. */
ATTRIBUTE_WARN_UNUSED_RET int md4_final(md4_context *ctx, u8 output[MD4_DIGEST_SIZE])
{
	unsigned int block_present = 0;
	u8 last_padded_block[2 * MD4_BLOCK_SIZE];
	int ret;

	MUST_HAVE((output != NULL), ret, err);
	MD4_HASH_CHECK_INITIALIZED(ctx, ret, err);

	/* Fill in our last block with zeroes */
	ret = local_memset(last_padded_block, 0, sizeof(last_padded_block)); EG(ret, err);

	/* This is our final step, so we proceed with the padding */
	block_present = ctx->md4_total % MD4_BLOCK_SIZE;
	if (block_present != 0) {
		/* Copy what's left in our temporary context buffer */
		ret = local_memcpy(last_padded_block, ctx->md4_buffer,
				   block_present); EG(ret, err);
	}

	/* Put the 0x80 byte, beginning of padding */
	last_padded_block[block_present] = 0x80;

	/* Handle possible additional block */
	if (block_present > (MD4_BLOCK_SIZE - 1 - sizeof(u64))) {
		/* We need an additional block */
		PUT_UINT64_LE(8 * ctx->md4_total, last_padded_block,
			      (2 * MD4_BLOCK_SIZE) - sizeof(u64));
		ret = md4_process(ctx, last_padded_block); EG(ret, err);
		ret = md4_process(ctx, last_padded_block + MD4_BLOCK_SIZE); EG(ret, err);
	} else {
		/* We do not need an additional block */
		PUT_UINT64_LE(8 * ctx->md4_total, last_padded_block,
			      MD4_BLOCK_SIZE - sizeof(u64));
		ret = md4_process(ctx, last_padded_block); EG(ret, err);
	}

	/* Output the hash result */
	PUT_UINT32_LE(ctx->md4_state[0], output, 0);
	PUT_UINT32_LE(ctx->md4_state[1], output, 4);
	PUT_UINT32_LE(ctx->md4_state[2], output, 8);
	PUT_UINT32_LE(ctx->md4_state[3], output, 12);

	/* Tell that we are uninitialized */
	ctx->magic = WORD(0);

	ret = 0;

err:
	return ret;
}
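
/*
 * Worked padding example (added commentary, not from the original source):
 * for the 3-byte message "abc", md4_final above builds a single 64-byte
 * block laid out as
 *
 *     61 62 63 80 00 00 ... 00 | 18 00 00 00 00 00 00 00
 *
 * i.e. the message bytes, the 0x80 padding marker, zeroes up to byte 55,
 * and the 64-bit little-endian bit length (3 * 8 = 24 = 0x18) stored in
 * bytes 56..63. Only when more than 55 message bytes remain in the last
 * block does the "additional block" branch above get taken.
 */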

/*
 * Scattered version performing init/update/finalize on a vector of buffers
 * 'inputs' with the length of each buffer passed via 'ilens'. The function
 * loops on pointers in 'inputs' until it finds a NULL pointer. The function
 * returns 0 on success, -1 on error.
 */
ATTRIBUTE_WARN_UNUSED_RET int md4_scattered(const u8 **inputs, const u32 *ilens,
					    u8 output[MD4_DIGEST_SIZE])
{
	md4_context ctx;
	int ret, pos = 0;

	MUST_HAVE((inputs != NULL) && (ilens != NULL) && (output != NULL), ret, err);

	ret = md4_init(&ctx); EG(ret, err);

	while (inputs[pos] != NULL) {
		ret = md4_update(&ctx, inputs[pos], ilens[pos]); EG(ret, err);
		pos += 1;
	}

	ret = md4_final(&ctx, output);

err:
	return ret;
}

/*
 * Single call version performing init/update/final on given input.
 * Returns 0 on success, -1 on error.
 */
ATTRIBUTE_WARN_UNUSED_RET int md4(const u8 *input, u32 ilen, u8 output[MD4_DIGEST_SIZE])
{
	md4_context ctx;
	int ret;

	ret = md4_init(&ctx); EG(ret, err);
	ret = md4_update(&ctx, input, ilen); EG(ret, err);
	ret = md4_final(&ctx, output);

err:
	return ret;
}
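
/*
 * Illustrative usage sketch (not part of the original file): a tiny
 * self-test of the one-shot and scattered entry points against the
 * RFC 1320 test vector MD4("abc") = a448017aaf21d8525fc10ae87aa6729d.
 * The main() wrapper and the MD4_EXAMPLES guard are hypothetical; they
 * only show how the public API is meant to be called and are not built
 * with the library.
 */
#ifdef MD4_EXAMPLES
#include <stdio.h>
#include <string.h>

int main(void)
{
	const u8 msg[] = "abc";
	const u8 *parts[] = { (const u8 *)"a", (const u8 *)"bc", NULL };
	const u32 part_lens[] = { 1, 2 };
	const u8 expected[MD4_DIGEST_SIZE] = {
		0xa4, 0x48, 0x01, 0x7a, 0xaf, 0x21, 0xd8, 0x52,
		0x5f, 0xc1, 0x0a, 0xe8, 0x7a, 0xa6, 0x72, 0x9d
	};
	u8 digest[MD4_DIGEST_SIZE];
	int ret;

	/* One-shot API */
	ret = md4(msg, 3, digest); EG(ret, err);
	if (memcmp(digest, expected, MD4_DIGEST_SIZE) != 0) {
		goto err;
	}

	/* Scattered API: NULL-terminated vector of buffers plus lengths */
	ret = md4_scattered(parts, part_lens, digest); EG(ret, err);
	if (memcmp(digest, expected, MD4_DIGEST_SIZE) != 0) {
		goto err;
	}

	printf("MD4 examples OK\n");
	return 0;

err:
	printf("MD4 examples failed\n");
	return -1;
}
#endif /* MD4_EXAMPLES */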