/*
 * Copyright (C) 2022 - This file is part of libecc project
 *
 * Authors:
 *     Ryad BENADJILA <ryadbenadjila@gmail.com>
 *     Arnaud EBALARD <arnaud.ebalard@ssi.gouv.fr>
 *
 * This software is licensed under a dual BSD and GPL v2 license.
 * See LICENSE file at the root folder of the project.
 */
#include <libecc/lib_ecc_config.h>
#if defined(WITH_SIG_BIGN) || defined(WITH_SIG_DBIGN)

#include <libecc/nn/nn_rand.h>
#include <libecc/nn/nn_mul_public.h>
#include <libecc/nn/nn_logical.h>

#include <libecc/sig/sig_algs_internal.h>
#include <libecc/sig/ec_key.h>
#include <libecc/utils/utils.h>
#ifdef VERBOSE_INNER_VALUES
#define EC_SIG_ALG "BIGN"
#endif
#include <libecc/utils/dbg_sig.h>

/*
 * This is an implementation of the BIGN signature algorithm as
 * described in the STB 34.101.45 standard
 * (http://apmi.bsu.by/assets/files/std/bign-spec29.pdf).
 *
 * The BIGN signature is a variation on the Schnorr signature scheme.
 *
 * An english high-level (less formal) description and rationale can be found
 * in the IETF archive:
 * https://mailarchive.ietf.org/arch/msg/cfrg/pI92HSRjMBg50NVEz32L5RciVBk/
 *
 * BIGN comes in two flavors: deterministic and non-deterministic. The current
 * file implements the two.
 *
 * In this implementation, we are *on purpose* more lax than the STB standard regarding
 * the so called "internal"/"external" hash function sizes and the order size:
 * - We accept order sizes that might be different than twice the internal hash
 *   function (HASH-BELT truncated) and the size of the external hash function.
 * - We accept security levels that might be different from {128, 192, 256}.
 *
 * If we strictly conform to STB 34.101.45, only orders of size exactly twice the
 * internal hash function length are accepted, and only external hash functions of size
 * of the order are accepted. Also only security levels of 128, 192 or 256 bits
 * are accepted.
 *
 * Being more lax on these parameters allows to be compatible with more hash
 * functions and curves.
 *
 * Finally, although the IETF archive in english leaves the "internal" hash functions
 * as configurable (wrt size constraints), the STB 34.101.45 standard fixes the BELT hash
 * function (standardized in STB 34.101.31) as the one to be used. The current file follows
 * this mandatory requirement and uses BELT as the only possible internal hash function
 * while the external one is configurable.
59*f0865ec9SKyle Evans * 60*f0865ec9SKyle Evans */ 61*f0865ec9SKyle Evans 62*f0865ec9SKyle Evans /* NOTE: BIGN uses per its standard the BELT-HASH hash function as its "internal" 63*f0865ec9SKyle Evans * hash function, as well as the BELT encryption block cipher during the deterministic 64*f0865ec9SKyle Evans * computation of the nonce for the deterministic version of BIGN. 65*f0865ec9SKyle Evans * Hence the sanity check below. 66*f0865ec9SKyle Evans */ 67*f0865ec9SKyle Evans #if !defined(WITH_HASH_BELT_HASH) 68*f0865ec9SKyle Evans #error "BIGN and DBIGN need BELT-HASH, please activate it!" 69*f0865ec9SKyle Evans #endif 70*f0865ec9SKyle Evans 71*f0865ec9SKyle Evans 72*f0865ec9SKyle Evans /* Reverses the endiannes of a buffer in place */ 73*f0865ec9SKyle Evans ATTRIBUTE_WARN_UNUSED_RET static inline int _reverse_endianness(u8 *buf, u16 buf_size) 74*f0865ec9SKyle Evans { 75*f0865ec9SKyle Evans u16 i; 76*f0865ec9SKyle Evans u8 tmp; 77*f0865ec9SKyle Evans int ret; 78*f0865ec9SKyle Evans 79*f0865ec9SKyle Evans MUST_HAVE((buf != NULL), ret, err); 80*f0865ec9SKyle Evans 81*f0865ec9SKyle Evans if(buf_size > 1){ 82*f0865ec9SKyle Evans for(i = 0; i < (buf_size / 2); i++){ 83*f0865ec9SKyle Evans tmp = buf[i]; 84*f0865ec9SKyle Evans buf[i] = buf[buf_size - 1 - i]; 85*f0865ec9SKyle Evans buf[buf_size - 1 - i] = tmp; 86*f0865ec9SKyle Evans } 87*f0865ec9SKyle Evans } 88*f0865ec9SKyle Evans 89*f0865ec9SKyle Evans ret = 0; 90*f0865ec9SKyle Evans err: 91*f0865ec9SKyle Evans return ret; 92*f0865ec9SKyle Evans } 93*f0865ec9SKyle Evans 94*f0865ec9SKyle Evans /* The additional data for bign are specific. We provide 95*f0865ec9SKyle Evans * helpers to extract them from an adata pointer. 
96*f0865ec9SKyle Evans */ 97*f0865ec9SKyle Evans int bign_get_oid_from_adata(const u8 *adata, u16 adata_len, const u8 **oid_ptr, u16 *oid_len) 98*f0865ec9SKyle Evans { 99*f0865ec9SKyle Evans int ret; 100*f0865ec9SKyle Evans u16 t_len; 101*f0865ec9SKyle Evans 102*f0865ec9SKyle Evans MUST_HAVE((adata != NULL) && (oid_ptr != NULL) && (oid_len != NULL), ret, err); 103*f0865ec9SKyle Evans MUST_HAVE((adata_len >= 4), ret, err); 104*f0865ec9SKyle Evans 105*f0865ec9SKyle Evans (*oid_len) = (u16)(((u16)adata[0] << 8) | adata[1]); 106*f0865ec9SKyle Evans t_len = (u16)(((u16)adata[2] << 8) | adata[3]); 107*f0865ec9SKyle Evans /* Check overflow */ 108*f0865ec9SKyle Evans MUST_HAVE(((*oid_len) + t_len) >= (t_len), ret, err); 109*f0865ec9SKyle Evans MUST_HAVE(((*oid_len) + t_len) <= (adata_len - 4), ret, err); 110*f0865ec9SKyle Evans (*oid_ptr) = &adata[4]; 111*f0865ec9SKyle Evans 112*f0865ec9SKyle Evans ret = 0; 113*f0865ec9SKyle Evans err: 114*f0865ec9SKyle Evans if(ret && (oid_ptr != NULL)){ 115*f0865ec9SKyle Evans (*oid_ptr) = NULL; 116*f0865ec9SKyle Evans } 117*f0865ec9SKyle Evans if(ret && (oid_len != NULL)){ 118*f0865ec9SKyle Evans (*oid_len) = 0; 119*f0865ec9SKyle Evans } 120*f0865ec9SKyle Evans return ret; 121*f0865ec9SKyle Evans } 122*f0865ec9SKyle Evans 123*f0865ec9SKyle Evans int bign_get_t_from_adata(const u8 *adata, u16 adata_len, const u8 **t_ptr, u16 *t_len) 124*f0865ec9SKyle Evans { 125*f0865ec9SKyle Evans int ret; 126*f0865ec9SKyle Evans u16 oid_len; 127*f0865ec9SKyle Evans 128*f0865ec9SKyle Evans MUST_HAVE((adata != NULL) && (t_ptr != NULL) && (t_len != NULL), ret, err); 129*f0865ec9SKyle Evans MUST_HAVE((adata_len >= 4), ret, err); 130*f0865ec9SKyle Evans 131*f0865ec9SKyle Evans oid_len = (u16)(((u16)adata[0] << 8) | adata[1]); 132*f0865ec9SKyle Evans (*t_len) = (u16)(((u16)adata[2] << 8) | adata[3]); 133*f0865ec9SKyle Evans /* Check overflow */ 134*f0865ec9SKyle Evans MUST_HAVE((oid_len + (*t_len)) >= (oid_len), ret, err); 135*f0865ec9SKyle Evans 
MUST_HAVE((oid_len + (*t_len)) <= (adata_len - 4), ret, err); 136*f0865ec9SKyle Evans (*t_ptr) = &adata[4 + oid_len]; 137*f0865ec9SKyle Evans 138*f0865ec9SKyle Evans ret = 0; 139*f0865ec9SKyle Evans err: 140*f0865ec9SKyle Evans if(ret && (t_ptr != NULL)){ 141*f0865ec9SKyle Evans (*t_ptr) = NULL; 142*f0865ec9SKyle Evans } 143*f0865ec9SKyle Evans if(ret && (t_len != NULL)){ 144*f0865ec9SKyle Evans (*t_len) = 0; 145*f0865ec9SKyle Evans } 146*f0865ec9SKyle Evans return ret; 147*f0865ec9SKyle Evans } 148*f0865ec9SKyle Evans 149*f0865ec9SKyle Evans int bign_set_adata(u8 *adata, u16 adata_len, const u8 *oid, u16 oid_len, const u8 *t, u16 t_len) 150*f0865ec9SKyle Evans { 151*f0865ec9SKyle Evans int ret; 152*f0865ec9SKyle Evans 153*f0865ec9SKyle Evans MUST_HAVE((adata != NULL), ret, err); 154*f0865ec9SKyle Evans 155*f0865ec9SKyle Evans MUST_HAVE((oid != NULL) || (oid_len == 0), ret, err); 156*f0865ec9SKyle Evans MUST_HAVE((t != NULL) || (t_len == 0), ret, err); 157*f0865ec9SKyle Evans MUST_HAVE((adata_len >= 4), ret, err); 158*f0865ec9SKyle Evans /* Check overflow */ 159*f0865ec9SKyle Evans MUST_HAVE(((oid_len + t_len) >= oid_len), ret, err); 160*f0865ec9SKyle Evans MUST_HAVE(((adata_len - 4) >= (oid_len + t_len)), ret, err); 161*f0865ec9SKyle Evans 162*f0865ec9SKyle Evans if(oid != NULL){ 163*f0865ec9SKyle Evans adata[0] = (u8)(oid_len >> 8); 164*f0865ec9SKyle Evans adata[1] = (u8)(oid_len & 0xff); 165*f0865ec9SKyle Evans ret = local_memcpy(&adata[4], oid, oid_len); EG(ret, err); 166*f0865ec9SKyle Evans } 167*f0865ec9SKyle Evans else{ 168*f0865ec9SKyle Evans adata[0] = adata[1] = 0; 169*f0865ec9SKyle Evans } 170*f0865ec9SKyle Evans if(t != NULL){ 171*f0865ec9SKyle Evans adata[2] = (u8)(t_len >> 8); 172*f0865ec9SKyle Evans adata[3] = (u8)(t_len & 0xff); 173*f0865ec9SKyle Evans ret = local_memcpy(&adata[4 + oid_len], t, t_len); EG(ret, err); 174*f0865ec9SKyle Evans 175*f0865ec9SKyle Evans } 176*f0865ec9SKyle Evans else{ 177*f0865ec9SKyle Evans adata[2] = adata[3] = 0; 
178*f0865ec9SKyle Evans } 179*f0865ec9SKyle Evans 180*f0865ec9SKyle Evans ret = 0; 181*f0865ec9SKyle Evans err: 182*f0865ec9SKyle Evans return ret; 183*f0865ec9SKyle Evans } 184*f0865ec9SKyle Evans 185*f0865ec9SKyle Evans #if defined(WITH_SIG_DBIGN) 186*f0865ec9SKyle Evans /* 187*f0865ec9SKyle Evans * Deterministic nonce generation function for deterministic BIGN, as 188*f0865ec9SKyle Evans * described in STB 34.101.45 6.3.3. 189*f0865ec9SKyle Evans * 190*f0865ec9SKyle Evans * NOTE: Deterministic nonce generation for BIGN is useful against attackers 191*f0865ec9SKyle Evans * in contexts where only poor RNG/entropy are available, or when nonce bits 192*f0865ec9SKyle Evans * leaking can be possible through side-channel attacks. 193*f0865ec9SKyle Evans * However, in contexts where fault attacks are easy to mount, deterministic 194*f0865ec9SKyle Evans * BIGN can bring more security risks than regular BIGN. 195*f0865ec9SKyle Evans * 196*f0865ec9SKyle Evans * Depending on the context where you use the library, choose carefully if 197*f0865ec9SKyle Evans * you want to use the deterministic version or not. 
198*f0865ec9SKyle Evans * 199*f0865ec9SKyle Evans */ 200*f0865ec9SKyle Evans ATTRIBUTE_WARN_UNUSED_RET static int __bign_determinitic_nonce(nn_t k, nn_src_t q, bitcnt_t q_bit_len, 201*f0865ec9SKyle Evans nn_src_t x, const u8 *adata, u16 adata_len, 202*f0865ec9SKyle Evans const u8 *h, u8 hlen) 203*f0865ec9SKyle Evans { 204*f0865ec9SKyle Evans int ret, cmp, iszero; 205*f0865ec9SKyle Evans u8 theta[BELT_HASH_DIGEST_SIZE]; 206*f0865ec9SKyle Evans u8 FE2OS_D[LOCAL_MAX(BYTECEIL(CURVES_MAX_Q_BIT_LEN), 2 * BELT_HASH_DIGEST_SIZE)]; 207*f0865ec9SKyle Evans u8 r[((MAX_DIGEST_SIZE / BELT_BLOCK_LEN) * BELT_BLOCK_LEN) + (2 * BELT_BLOCK_LEN)]; 208*f0865ec9SKyle Evans u8 r_bar[((MAX_DIGEST_SIZE / BELT_BLOCK_LEN) * BELT_BLOCK_LEN) + (2 * BELT_BLOCK_LEN)]; 209*f0865ec9SKyle Evans u8 q_len, l; 210*f0865ec9SKyle Evans unsigned int j, z, n; 211*f0865ec9SKyle Evans u32 i; 212*f0865ec9SKyle Evans u16 r_bar_len; 213*f0865ec9SKyle Evans 214*f0865ec9SKyle Evans belt_hash_context belt_hash_ctx; 215*f0865ec9SKyle Evans const u8 *oid_ptr = NULL; 216*f0865ec9SKyle Evans const u8 *t_ptr = NULL; 217*f0865ec9SKyle Evans u16 oid_len = 0, t_len = 0; 218*f0865ec9SKyle Evans 219*f0865ec9SKyle Evans MUST_HAVE((adata != NULL) && (h != NULL), ret, err); 220*f0865ec9SKyle Evans ret = nn_check_initialized(q); EG(ret, err); 221*f0865ec9SKyle Evans ret = nn_check_initialized(x); EG(ret, err); 222*f0865ec9SKyle Evans 223*f0865ec9SKyle Evans ret = local_memset(theta, 0, sizeof(theta)); EG(ret, err); 224*f0865ec9SKyle Evans ret = local_memset(FE2OS_D, 0, sizeof(FE2OS_D)); EG(ret, err); 225*f0865ec9SKyle Evans ret = local_memset(r_bar, 0, sizeof(r_bar)); EG(ret, err); 226*f0865ec9SKyle Evans 227*f0865ec9SKyle Evans q_len = (u8)BYTECEIL(q_bit_len); 228*f0865ec9SKyle Evans 229*f0865ec9SKyle Evans /* Compute l depending on the order */ 230*f0865ec9SKyle Evans l = (u8)BIGN_S0_LEN(q_bit_len); 231*f0865ec9SKyle Evans 232*f0865ec9SKyle Evans /* Extract oid and t from the additional data */ 233*f0865ec9SKyle Evans ret = 
bign_get_oid_from_adata(adata, adata_len, &oid_ptr, &oid_len); EG(ret, err); 234*f0865ec9SKyle Evans ret = bign_get_t_from_adata(adata, adata_len, &t_ptr, &t_len); EG(ret, err); 235*f0865ec9SKyle Evans 236*f0865ec9SKyle Evans ret = belt_hash_init(&belt_hash_ctx); EG(ret, err); 237*f0865ec9SKyle Evans ret = belt_hash_update(&belt_hash_ctx, oid_ptr, oid_len); EG(ret, err); 238*f0865ec9SKyle Evans 239*f0865ec9SKyle Evans /* Put the private key in a string <d>2*l */ 240*f0865ec9SKyle Evans ret = local_memset(FE2OS_D, 0, sizeof(FE2OS_D)); EG(ret, err); 241*f0865ec9SKyle Evans ret = nn_export_to_buf(&FE2OS_D[0], q_len, x); EG(ret, err); 242*f0865ec9SKyle Evans ret = _reverse_endianness(&FE2OS_D[0], q_len); EG(ret, err); 243*f0865ec9SKyle Evans /* Only hash the 2*l bytes of d */ 244*f0865ec9SKyle Evans ret = belt_hash_update(&belt_hash_ctx, &FE2OS_D[0], (u32)(2*l)); EG(ret, err); 245*f0865ec9SKyle Evans 246*f0865ec9SKyle Evans ret = belt_hash_update(&belt_hash_ctx, t_ptr, t_len); EG(ret, err); 247*f0865ec9SKyle Evans 248*f0865ec9SKyle Evans ret = belt_hash_final(&belt_hash_ctx, theta); EG(ret, err); 249*f0865ec9SKyle Evans 250*f0865ec9SKyle Evans dbg_buf_print("theta", theta, BELT_HASH_DIGEST_SIZE); 251*f0865ec9SKyle Evans 252*f0865ec9SKyle Evans /* n is the number of 128 bits blocks in H */ 253*f0865ec9SKyle Evans n = (hlen / BELT_BLOCK_LEN); 254*f0865ec9SKyle Evans 255*f0865ec9SKyle Evans MUST_HAVE((hlen <= sizeof(r)), ret, err); 256*f0865ec9SKyle Evans ret = local_memset(r, 0, sizeof(r)); 257*f0865ec9SKyle Evans ret = local_memcpy(r, h, hlen); EG(ret, err); 258*f0865ec9SKyle Evans /* If we have less than two blocks for the input hash size, we use zero 259*f0865ec9SKyle Evans * padding to achieve at least two blocks. 260*f0865ec9SKyle Evans * NOTE: this is not in the standard but allows to be compatible with small 261*f0865ec9SKyle Evans * size hash functions. 
262*f0865ec9SKyle Evans */ 263*f0865ec9SKyle Evans if(n <= 1){ 264*f0865ec9SKyle Evans n = 2; 265*f0865ec9SKyle Evans } 266*f0865ec9SKyle Evans 267*f0865ec9SKyle Evans /* Now iterate until the nonce is computed in [1, q-1] 268*f0865ec9SKyle Evans * NOTE: we are ensured here that n >= 2, which allows us to 269*f0865ec9SKyle Evans * index (n-1) and (n-2) blocks in r. 270*f0865ec9SKyle Evans */ 271*f0865ec9SKyle Evans i = (u32)1; 272*f0865ec9SKyle Evans 273*f0865ec9SKyle Evans while(1){ 274*f0865ec9SKyle Evans u8 s[BELT_BLOCK_LEN]; 275*f0865ec9SKyle Evans u8 i_block[BELT_BLOCK_LEN]; 276*f0865ec9SKyle Evans ret = local_memset(s, 0, sizeof(s)); EG(ret, err); 277*f0865ec9SKyle Evans 278*f0865ec9SKyle Evans /* Put the xor of all n-1 elements in s */ 279*f0865ec9SKyle Evans for(j = 0; j < (n - 1); j++){ 280*f0865ec9SKyle Evans for(z = 0; z < BELT_BLOCK_LEN; z++){ 281*f0865ec9SKyle Evans s[z] ^= r[(BELT_BLOCK_LEN * j) + z]; 282*f0865ec9SKyle Evans } 283*f0865ec9SKyle Evans } 284*f0865ec9SKyle Evans /* Move elements left for the first n-2 elements */ 285*f0865ec9SKyle Evans ret = local_memcpy(&r[0], &r[BELT_BLOCK_LEN], (n - 2) * BELT_BLOCK_LEN); EG(ret, err); 286*f0865ec9SKyle Evans 287*f0865ec9SKyle Evans /* r_n-1 = belt-block(s, theta) ^ r_n ^ <i>128 */ 288*f0865ec9SKyle Evans ret = local_memset(i_block, 0, sizeof(i_block)); EG(ret, err); 289*f0865ec9SKyle Evans PUT_UINT32_LE(i, i_block, 0); 290*f0865ec9SKyle Evans belt_encrypt(s, &r[(n - 2) * BELT_BLOCK_LEN], theta); 291*f0865ec9SKyle Evans for(z = 0; z < BELT_BLOCK_LEN; z++){ 292*f0865ec9SKyle Evans r[((n - 2) * BELT_BLOCK_LEN) + z] ^= (r[((n - 1) * BELT_BLOCK_LEN) + z] ^ i_block[z]); 293*f0865ec9SKyle Evans } 294*f0865ec9SKyle Evans 295*f0865ec9SKyle Evans /* r_n = s */ 296*f0865ec9SKyle Evans ret = local_memcpy(&r[(n - 1) * BELT_BLOCK_LEN], s, BELT_BLOCK_LEN); EG(ret, err); 297*f0865ec9SKyle Evans 298*f0865ec9SKyle Evans /* Import r_bar as a big number in little endian 299*f0865ec9SKyle Evans * (truncate our import to 
the bitlength size of q) 300*f0865ec9SKyle Evans */ 301*f0865ec9SKyle Evans if(q_len < (n * BELT_BLOCK_LEN)){ 302*f0865ec9SKyle Evans r_bar_len = q_len; 303*f0865ec9SKyle Evans ret = local_memcpy(&r_bar[0], &r[0], r_bar_len); EG(ret, err); 304*f0865ec9SKyle Evans /* Handle the useless bits between q_bit_len and (8 * q_len) */ 305*f0865ec9SKyle Evans if((q_bit_len % 8) != 0){ 306*f0865ec9SKyle Evans r_bar[r_bar_len - 1] &= (u8)((0x1 << (q_bit_len % 8)) - 1); 307*f0865ec9SKyle Evans } 308*f0865ec9SKyle Evans } 309*f0865ec9SKyle Evans else{ 310*f0865ec9SKyle Evans /* In this case, q_len is bigger than the size of r, we need to adapt: 311*f0865ec9SKyle Evans * we truncate to the size of r. 312*f0865ec9SKyle Evans * NOTE: we of course lose security, but this is the explicit choice 313*f0865ec9SKyle Evans * of the user using a "small" hash function with a "big" order. 314*f0865ec9SKyle Evans */ 315*f0865ec9SKyle Evans MUST_HAVE((n * BELT_BLOCK_LEN) <= 0xffff, ret, err); 316*f0865ec9SKyle Evans r_bar_len = (u16)(n * BELT_BLOCK_LEN); 317*f0865ec9SKyle Evans ret = local_memcpy(&r_bar[0], &r[0], r_bar_len); EG(ret, err); 318*f0865ec9SKyle Evans } 319*f0865ec9SKyle Evans ret = _reverse_endianness(&r_bar[0], r_bar_len); EG(ret, err); 320*f0865ec9SKyle Evans ret = nn_init_from_buf(k, &r_bar[0], r_bar_len); EG(ret, err); 321*f0865ec9SKyle Evans 322*f0865ec9SKyle Evans /* Compare it to q */ 323*f0865ec9SKyle Evans ret = nn_cmp(k, q, &cmp); EG(ret, err); 324*f0865ec9SKyle Evans /* Compare it to 0 */ 325*f0865ec9SKyle Evans ret = nn_iszero(k, &iszero); EG(ret, err); 326*f0865ec9SKyle Evans 327*f0865ec9SKyle Evans if((i >= (2 * n)) && (cmp < 0) && (!iszero)){ 328*f0865ec9SKyle Evans break; 329*f0865ec9SKyle Evans } 330*f0865ec9SKyle Evans i += (u32)1; 331*f0865ec9SKyle Evans /* If we have wrapped (meaning i > 2^32), we exit with failure */ 332*f0865ec9SKyle Evans MUST_HAVE((i != 0), ret, err); 333*f0865ec9SKyle Evans } 334*f0865ec9SKyle Evans 335*f0865ec9SKyle Evans ret = 0; 
336*f0865ec9SKyle Evans err: 337*f0865ec9SKyle Evans /* Destroy local variables potentially containing sensitive data */ 338*f0865ec9SKyle Evans IGNORE_RET_VAL(local_memset(theta, 0, sizeof(theta))); 339*f0865ec9SKyle Evans IGNORE_RET_VAL(local_memset(FE2OS_D, 0, sizeof(FE2OS_D))); 340*f0865ec9SKyle Evans 341*f0865ec9SKyle Evans return ret; 342*f0865ec9SKyle Evans } 343*f0865ec9SKyle Evans #endif 344*f0865ec9SKyle Evans 345*f0865ec9SKyle Evans int __bign_init_pub_key(ec_pub_key *out_pub, const ec_priv_key *in_priv, 346*f0865ec9SKyle Evans ec_alg_type key_type) 347*f0865ec9SKyle Evans { 348*f0865ec9SKyle Evans prj_pt_src_t G; 349*f0865ec9SKyle Evans int ret, cmp; 350*f0865ec9SKyle Evans nn_src_t q; 351*f0865ec9SKyle Evans 352*f0865ec9SKyle Evans MUST_HAVE((out_pub != NULL), ret, err); 353*f0865ec9SKyle Evans 354*f0865ec9SKyle Evans /* Zero init public key to be generated */ 355*f0865ec9SKyle Evans ret = local_memset(out_pub, 0, sizeof(ec_pub_key)); EG(ret, err); 356*f0865ec9SKyle Evans 357*f0865ec9SKyle Evans ret = priv_key_check_initialized_and_type(in_priv, key_type); EG(ret, err); 358*f0865ec9SKyle Evans q = &(in_priv->params->ec_gen_order); 359*f0865ec9SKyle Evans 360*f0865ec9SKyle Evans /* Sanity check on key compliance */ 361*f0865ec9SKyle Evans MUST_HAVE((!nn_cmp(&(in_priv->x), q, &cmp)) && (cmp < 0), ret, err); 362*f0865ec9SKyle Evans 363*f0865ec9SKyle Evans /* Y = xG */ 364*f0865ec9SKyle Evans G = &(in_priv->params->ec_gen); 365*f0865ec9SKyle Evans /* Use blinding when computing point scalar multiplication */ 366*f0865ec9SKyle Evans ret = prj_pt_mul_blind(&(out_pub->y), &(in_priv->x), G); EG(ret, err); 367*f0865ec9SKyle Evans 368*f0865ec9SKyle Evans out_pub->key_type = key_type; 369*f0865ec9SKyle Evans out_pub->params = in_priv->params; 370*f0865ec9SKyle Evans out_pub->magic = PUB_KEY_MAGIC; 371*f0865ec9SKyle Evans 372*f0865ec9SKyle Evans err: 373*f0865ec9SKyle Evans return ret; 374*f0865ec9SKyle Evans } 375*f0865ec9SKyle Evans 376*f0865ec9SKyle Evans int 
__bign_siglen(u16 p_bit_len, u16 q_bit_len, u8 hsize, u8 blocksize, u8 *siglen) 377*f0865ec9SKyle Evans { 378*f0865ec9SKyle Evans int ret; 379*f0865ec9SKyle Evans 380*f0865ec9SKyle Evans MUST_HAVE(siglen != NULL, ret, err); 381*f0865ec9SKyle Evans MUST_HAVE((p_bit_len <= CURVES_MAX_P_BIT_LEN) && 382*f0865ec9SKyle Evans (q_bit_len <= CURVES_MAX_Q_BIT_LEN) && 383*f0865ec9SKyle Evans (hsize <= MAX_DIGEST_SIZE) && (blocksize <= MAX_BLOCK_SIZE), ret, err); 384*f0865ec9SKyle Evans (*siglen) = (u8)BIGN_SIGLEN(q_bit_len); 385*f0865ec9SKyle Evans ret = 0; 386*f0865ec9SKyle Evans 387*f0865ec9SKyle Evans err: 388*f0865ec9SKyle Evans return ret; 389*f0865ec9SKyle Evans } 390*f0865ec9SKyle Evans 391*f0865ec9SKyle Evans /* 392*f0865ec9SKyle Evans * Generic *internal* BIGN signature functions (init, update and finalize). 393*f0865ec9SKyle Evans * Their purpose is to allow passing a specific hash function (along with 394*f0865ec9SKyle Evans * its output size) and the random ephemeral key k, so that compliance 395*f0865ec9SKyle Evans * tests against test vectors can be made without ugly hack in the code 396*f0865ec9SKyle Evans * itself. 397*f0865ec9SKyle Evans * 398*f0865ec9SKyle Evans * Implementation notes: 399*f0865ec9SKyle Evans * 400*f0865ec9SKyle Evans * a) The BIGN algorithm makes use of the OID of the external hash function. 401*f0865ec9SKyle Evans * We let the upper layer provide us with this in the "adata" field of the 402*f0865ec9SKyle Evans * context. 
403*f0865ec9SKyle Evans * 404*f0865ec9SKyle Evans */ 405*f0865ec9SKyle Evans 406*f0865ec9SKyle Evans #define BIGN_SIGN_MAGIC ((word_t)(0x63439a2b38921340ULL)) 407*f0865ec9SKyle Evans #define BIGN_SIGN_CHECK_INITIALIZED(A, ret, err) \ 408*f0865ec9SKyle Evans MUST_HAVE((((void *)(A)) != NULL) && ((A)->magic == BIGN_SIGN_MAGIC), ret, err) 409*f0865ec9SKyle Evans 410*f0865ec9SKyle Evans int __bign_sign_init(struct ec_sign_context *ctx, ec_alg_type key_type) 411*f0865ec9SKyle Evans { 412*f0865ec9SKyle Evans int ret; 413*f0865ec9SKyle Evans 414*f0865ec9SKyle Evans /* First, verify context has been initialized */ 415*f0865ec9SKyle Evans ret = sig_sign_check_initialized(ctx); EG(ret, err); 416*f0865ec9SKyle Evans 417*f0865ec9SKyle Evans /* Additional sanity checks on input params from context */ 418*f0865ec9SKyle Evans ret = key_pair_check_initialized_and_type(ctx->key_pair, key_type); EG(ret, err); 419*f0865ec9SKyle Evans 420*f0865ec9SKyle Evans MUST_HAVE((ctx->h != NULL) && (ctx->h->digest_size <= MAX_DIGEST_SIZE) && 421*f0865ec9SKyle Evans (ctx->h->block_size <= MAX_BLOCK_SIZE), ret, err); 422*f0865ec9SKyle Evans 423*f0865ec9SKyle Evans /* We check that our additional data is not NULL as it must contain 424*f0865ec9SKyle Evans * the mandatory external hash OID. 
425*f0865ec9SKyle Evans */ 426*f0865ec9SKyle Evans MUST_HAVE((ctx->adata != NULL) && (ctx->adata_len != 0), ret, err); 427*f0865ec9SKyle Evans 428*f0865ec9SKyle Evans /* 429*f0865ec9SKyle Evans * Initialize hash context stored in our private part of context 430*f0865ec9SKyle Evans * and record data init has been done 431*f0865ec9SKyle Evans */ 432*f0865ec9SKyle Evans /* Since we call a callback, sanity check our mapping */ 433*f0865ec9SKyle Evans ret = hash_mapping_callbacks_sanity_check(ctx->h); EG(ret, err); 434*f0865ec9SKyle Evans ret = ctx->h->hfunc_init(&(ctx->sign_data.bign.h_ctx)); EG(ret, err); 435*f0865ec9SKyle Evans 436*f0865ec9SKyle Evans ctx->sign_data.bign.magic = BIGN_SIGN_MAGIC; 437*f0865ec9SKyle Evans 438*f0865ec9SKyle Evans err: 439*f0865ec9SKyle Evans return ret; 440*f0865ec9SKyle Evans } 441*f0865ec9SKyle Evans 442*f0865ec9SKyle Evans int __bign_sign_update(struct ec_sign_context *ctx, 443*f0865ec9SKyle Evans const u8 *chunk, u32 chunklen, ec_alg_type key_type) 444*f0865ec9SKyle Evans { 445*f0865ec9SKyle Evans int ret; 446*f0865ec9SKyle Evans 447*f0865ec9SKyle Evans /* 448*f0865ec9SKyle Evans * First, verify context has been initialized and private 449*f0865ec9SKyle Evans * part too. This guarantees the context is an BIGN 450*f0865ec9SKyle Evans * signature one and we do not update() or finalize() 451*f0865ec9SKyle Evans * before init(). 452*f0865ec9SKyle Evans */ 453*f0865ec9SKyle Evans ret = sig_sign_check_initialized(ctx); EG(ret, err); 454*f0865ec9SKyle Evans BIGN_SIGN_CHECK_INITIALIZED(&(ctx->sign_data.bign), ret, err); 455*f0865ec9SKyle Evans 456*f0865ec9SKyle Evans /* Additional sanity checks on input params from context */ 457*f0865ec9SKyle Evans ret = key_pair_check_initialized_and_type(ctx->key_pair, key_type); EG(ret, err); 458*f0865ec9SKyle Evans 459*f0865ec9SKyle Evans /* 1. 
Compute h = H(m) */ 460*f0865ec9SKyle Evans /* Since we call a callback, sanity check our mapping */ 461*f0865ec9SKyle Evans ret = hash_mapping_callbacks_sanity_check(ctx->h); EG(ret, err); 462*f0865ec9SKyle Evans ret = ctx->h->hfunc_update(&(ctx->sign_data.bign.h_ctx), chunk, chunklen); 463*f0865ec9SKyle Evans 464*f0865ec9SKyle Evans err: 465*f0865ec9SKyle Evans return ret; 466*f0865ec9SKyle Evans } 467*f0865ec9SKyle Evans 468*f0865ec9SKyle Evans int __bign_sign_finalize(struct ec_sign_context *ctx, u8 *sig, u8 siglen, 469*f0865ec9SKyle Evans ec_alg_type key_type) 470*f0865ec9SKyle Evans { 471*f0865ec9SKyle Evans int ret, cmp; 472*f0865ec9SKyle Evans const ec_priv_key *priv_key; 473*f0865ec9SKyle Evans prj_pt_src_t G; 474*f0865ec9SKyle Evans u8 hash[MAX_DIGEST_SIZE]; 475*f0865ec9SKyle Evans u8 hash_belt[BELT_HASH_DIGEST_SIZE]; 476*f0865ec9SKyle Evans u8 FE2OS_W[LOCAL_MAX(2 * BYTECEIL(CURVES_MAX_P_BIT_LEN), 2 * BIGN_S0_LEN(CURVES_MAX_Q_BIT_LEN))]; 477*f0865ec9SKyle Evans bitcnt_t q_bit_len, p_bit_len; 478*f0865ec9SKyle Evans prj_pt kG; 479*f0865ec9SKyle Evans nn_src_t q, x; 480*f0865ec9SKyle Evans u8 hsize, p_len, l; 481*f0865ec9SKyle Evans nn k, h, tmp, s1; 482*f0865ec9SKyle Evans belt_hash_context belt_hash_ctx; 483*f0865ec9SKyle Evans const u8 *oid_ptr = NULL; 484*f0865ec9SKyle Evans u16 oid_len = 0; 485*f0865ec9SKyle Evans #ifdef USE_SIG_BLINDING 486*f0865ec9SKyle Evans /* b is the blinding mask */ 487*f0865ec9SKyle Evans nn b, binv; 488*f0865ec9SKyle Evans b.magic = binv.magic = WORD(0); 489*f0865ec9SKyle Evans #endif 490*f0865ec9SKyle Evans 491*f0865ec9SKyle Evans k.magic = h.magic = WORD(0); 492*f0865ec9SKyle Evans tmp.magic = s1.magic = WORD(0); 493*f0865ec9SKyle Evans kG.magic = WORD(0); 494*f0865ec9SKyle Evans 495*f0865ec9SKyle Evans /* 496*f0865ec9SKyle Evans * First, verify context has been initialized and private 497*f0865ec9SKyle Evans * part too. 
This guarantees the context is an BIGN 498*f0865ec9SKyle Evans * signature one and we do not finalize() before init(). 499*f0865ec9SKyle Evans */ 500*f0865ec9SKyle Evans ret = sig_sign_check_initialized(ctx); EG(ret, err); 501*f0865ec9SKyle Evans BIGN_SIGN_CHECK_INITIALIZED(&(ctx->sign_data.bign), ret, err); 502*f0865ec9SKyle Evans MUST_HAVE((sig != NULL), ret, err); 503*f0865ec9SKyle Evans 504*f0865ec9SKyle Evans /* Additional sanity checks on input params from context */ 505*f0865ec9SKyle Evans ret = key_pair_check_initialized_and_type(ctx->key_pair, key_type); EG(ret, err); 506*f0865ec9SKyle Evans 507*f0865ec9SKyle Evans /* Zero init out point */ 508*f0865ec9SKyle Evans ret = local_memset(&kG, 0, sizeof(prj_pt)); EG(ret, err); 509*f0865ec9SKyle Evans 510*f0865ec9SKyle Evans /* Make things more readable */ 511*f0865ec9SKyle Evans priv_key = &(ctx->key_pair->priv_key); 512*f0865ec9SKyle Evans q = &(priv_key->params->ec_gen_order); 513*f0865ec9SKyle Evans q_bit_len = priv_key->params->ec_gen_order_bitlen; 514*f0865ec9SKyle Evans p_bit_len = priv_key->params->ec_fp.p_bitlen; 515*f0865ec9SKyle Evans G = &(priv_key->params->ec_gen); 516*f0865ec9SKyle Evans p_len = (u8)BYTECEIL(p_bit_len); 517*f0865ec9SKyle Evans x = &(priv_key->x); 518*f0865ec9SKyle Evans hsize = ctx->h->digest_size; 519*f0865ec9SKyle Evans 520*f0865ec9SKyle Evans MUST_HAVE((priv_key->key_type == key_type), ret, err); 521*f0865ec9SKyle Evans 522*f0865ec9SKyle Evans /* Compute l depending on the order */ 523*f0865ec9SKyle Evans l = (u8)BIGN_S0_LEN(q_bit_len); 524*f0865ec9SKyle Evans 525*f0865ec9SKyle Evans /* Sanity check */ 526*f0865ec9SKyle Evans ret = nn_cmp(x, q, &cmp); EG(ret, err); 527*f0865ec9SKyle Evans /* This should not happen and means that our 528*f0865ec9SKyle Evans * private key is not compliant! 
529*f0865ec9SKyle Evans */ 530*f0865ec9SKyle Evans MUST_HAVE((cmp < 0), ret, err); 531*f0865ec9SKyle Evans 532*f0865ec9SKyle Evans dbg_nn_print("p", &(priv_key->params->ec_fp.p)); 533*f0865ec9SKyle Evans dbg_nn_print("q", &(priv_key->params->ec_gen_order)); 534*f0865ec9SKyle Evans dbg_priv_key_print("x", priv_key); 535*f0865ec9SKyle Evans dbg_ec_point_print("G", &(priv_key->params->ec_gen)); 536*f0865ec9SKyle Evans dbg_pub_key_print("Y", &(ctx->key_pair->pub_key)); 537*f0865ec9SKyle Evans 538*f0865ec9SKyle Evans /* Check given signature buffer length has the expected size */ 539*f0865ec9SKyle Evans MUST_HAVE((siglen == BIGN_SIGLEN(q_bit_len)), ret, err); 540*f0865ec9SKyle Evans 541*f0865ec9SKyle Evans /* We check that our additional data is not NULL as it must contain 542*f0865ec9SKyle Evans * the mandatory external hash OID. 543*f0865ec9SKyle Evans */ 544*f0865ec9SKyle Evans MUST_HAVE((ctx->adata != NULL) && (ctx->adata_len != 0), ret, err); 545*f0865ec9SKyle Evans 546*f0865ec9SKyle Evans /* 1. Compute h = H(m) */ 547*f0865ec9SKyle Evans ret = local_memset(hash, 0, hsize); EG(ret, err); 548*f0865ec9SKyle Evans /* Since we call a callback, sanity check our mapping */ 549*f0865ec9SKyle Evans ret = hash_mapping_callbacks_sanity_check(ctx->h); EG(ret, err); 550*f0865ec9SKyle Evans ret = ctx->h->hfunc_finalize(&(ctx->sign_data.bign.h_ctx), hash); EG(ret, err); 551*f0865ec9SKyle Evans dbg_buf_print("h", hash, hsize); 552*f0865ec9SKyle Evans 553*f0865ec9SKyle Evans 554*f0865ec9SKyle Evans /* 2. get a random value k in ]0,q[ */ 555*f0865ec9SKyle Evans #ifdef NO_KNOWN_VECTORS 556*f0865ec9SKyle Evans /* NOTE: when we do not need self tests for known vectors, 557*f0865ec9SKyle Evans * we can be strict about random function handler! 558*f0865ec9SKyle Evans * This allows us to avoid the corruption of such a pointer. 
559*f0865ec9SKyle Evans */ 560*f0865ec9SKyle Evans /* Sanity check on the handler before calling it */ 561*f0865ec9SKyle Evans if(ctx->rand != nn_get_random_mod){ 562*f0865ec9SKyle Evans #ifdef WITH_SIG_DBIGN 563*f0865ec9SKyle Evans /* In deterministic BIGN, nevermind! */ 564*f0865ec9SKyle Evans if(key_type != DBIGN) 565*f0865ec9SKyle Evans #endif 566*f0865ec9SKyle Evans { 567*f0865ec9SKyle Evans ret = -1; 568*f0865ec9SKyle Evans goto err; 569*f0865ec9SKyle Evans } 570*f0865ec9SKyle Evans } 571*f0865ec9SKyle Evans #endif 572*f0865ec9SKyle Evans if(ctx->rand != NULL){ 573*f0865ec9SKyle Evans /* Non-deterministic generation, or deterministic with 574*f0865ec9SKyle Evans * test vectors. 575*f0865ec9SKyle Evans */ 576*f0865ec9SKyle Evans ret = ctx->rand(&k, q); 577*f0865ec9SKyle Evans } 578*f0865ec9SKyle Evans else 579*f0865ec9SKyle Evans #if defined(WITH_SIG_DBIGN) 580*f0865ec9SKyle Evans { 581*f0865ec9SKyle Evans /* Only applies for DETERMINISTIC BIGN */ 582*f0865ec9SKyle Evans if(key_type != DBIGN){ 583*f0865ec9SKyle Evans ret = -1; 584*f0865ec9SKyle Evans goto err; 585*f0865ec9SKyle Evans } 586*f0865ec9SKyle Evans /* Deterministically generate k as STB 34.101.45 mandates */ 587*f0865ec9SKyle Evans ret = __bign_determinitic_nonce(&k, q, q_bit_len, &(priv_key->x), ctx->adata, ctx->adata_len, hash, hsize); 588*f0865ec9SKyle Evans } 589*f0865ec9SKyle Evans #else 590*f0865ec9SKyle Evans { 591*f0865ec9SKyle Evans /* NULL rand function is not accepted for regular BIGN */ 592*f0865ec9SKyle Evans ret = -1; 593*f0865ec9SKyle Evans goto err; 594*f0865ec9SKyle Evans } 595*f0865ec9SKyle Evans #endif 596*f0865ec9SKyle Evans if (ret) { 597*f0865ec9SKyle Evans ret = -1; 598*f0865ec9SKyle Evans goto err; 599*f0865ec9SKyle Evans } 600*f0865ec9SKyle Evans dbg_nn_print("k", &k); 601*f0865ec9SKyle Evans 602*f0865ec9SKyle Evans #ifdef USE_SIG_BLINDING 603*f0865ec9SKyle Evans /* Note: if we use blinding, r and e are multiplied by 604*f0865ec9SKyle Evans * a random value b in ]0,q[ */ 
605*f0865ec9SKyle Evans ret = nn_get_random_mod(&b, q); EG(ret, err); 606*f0865ec9SKyle Evans /* NOTE: we use Fermat's little theorem inversion for 607*f0865ec9SKyle Evans * constant time here. This is possible since q is prime. 608*f0865ec9SKyle Evans */ 609*f0865ec9SKyle Evans ret = nn_modinv_fermat(&binv, &b, q); EG(ret, err); 610*f0865ec9SKyle Evans 611*f0865ec9SKyle Evans dbg_nn_print("b", &b); 612*f0865ec9SKyle Evans #endif /* USE_SIG_BLINDING */ 613*f0865ec9SKyle Evans 614*f0865ec9SKyle Evans 615*f0865ec9SKyle Evans /* 3. Compute W = (W_x,W_y) = kG */ 616*f0865ec9SKyle Evans #ifdef USE_SIG_BLINDING 617*f0865ec9SKyle Evans ret = prj_pt_mul_blind(&kG, &k, G); EG(ret, err); 618*f0865ec9SKyle Evans #else 619*f0865ec9SKyle Evans ret = prj_pt_mul(&kG, &k, G); EG(ret, err); 620*f0865ec9SKyle Evans #endif /* USE_SIG_BLINDING */ 621*f0865ec9SKyle Evans ret = prj_pt_unique(&kG, &kG); EG(ret, err); 622*f0865ec9SKyle Evans 623*f0865ec9SKyle Evans dbg_nn_print("W_x", &(kG.X.fp_val)); 624*f0865ec9SKyle Evans dbg_nn_print("W_y", &(kG.Y.fp_val)); 625*f0865ec9SKyle Evans 626*f0865ec9SKyle Evans /* 4. 
Compute s0 = <BELT-HASH(OID(H) || <<FE2OS(W_x)> || <FE2OS(W_y)>>2*l || H(X))>l */ 627*f0865ec9SKyle Evans ret = belt_hash_init(&belt_hash_ctx); EG(ret, err); 628*f0865ec9SKyle Evans ret = bign_get_oid_from_adata(ctx->adata, ctx->adata_len, &oid_ptr, &oid_len); EG(ret, err); 629*f0865ec9SKyle Evans ret = belt_hash_update(&belt_hash_ctx, oid_ptr, oid_len); EG(ret, err); 630*f0865ec9SKyle Evans /**/ 631*f0865ec9SKyle Evans ret = local_memset(FE2OS_W, 0, sizeof(FE2OS_W)); EG(ret, err); 632*f0865ec9SKyle Evans ret = fp_export_to_buf(&FE2OS_W[0], p_len, &(kG.X)); EG(ret, err); 633*f0865ec9SKyle Evans ret = _reverse_endianness(&FE2OS_W[0], p_len); EG(ret, err); 634*f0865ec9SKyle Evans ret = fp_export_to_buf(&FE2OS_W[p_len], p_len, &(kG.Y)); EG(ret, err); 635*f0865ec9SKyle Evans ret = _reverse_endianness(&FE2OS_W[p_len], p_len); EG(ret, err); 636*f0865ec9SKyle Evans /* Only hash the 2*l bytes of FE2OS(W_x) || FE2OS(W_y) */ 637*f0865ec9SKyle Evans ret = belt_hash_update(&belt_hash_ctx, &FE2OS_W[0], (u32)(2*l)); EG(ret, err); 638*f0865ec9SKyle Evans /**/ 639*f0865ec9SKyle Evans ret = belt_hash_update(&belt_hash_ctx, hash, hsize); EG(ret, err); 640*f0865ec9SKyle Evans /* Store our s0 */ 641*f0865ec9SKyle Evans ret = local_memset(hash_belt, 0, sizeof(hash_belt)); EG(ret, err); 642*f0865ec9SKyle Evans ret = belt_hash_final(&belt_hash_ctx, hash_belt); EG(ret, err); 643*f0865ec9SKyle Evans ret = local_memset(&sig[0], 0, l); EG(ret, err); 644*f0865ec9SKyle Evans ret = local_memcpy(&sig[0], &hash_belt[0], LOCAL_MIN(l, BELT_HASH_DIGEST_SIZE)); EG(ret, err); 645*f0865ec9SKyle Evans dbg_buf_print("s0", &sig[0], LOCAL_MIN(l, BELT_HASH_DIGEST_SIZE)); 646*f0865ec9SKyle Evans 647*f0865ec9SKyle Evans /* 5. 
Now compute s1 = (k - H_bar - (s0_bar + 2**l) * d) mod q */ 648*f0865ec9SKyle Evans /* First import H and s0 as numbers modulo q */ 649*f0865ec9SKyle Evans /* Import H */ 650*f0865ec9SKyle Evans ret = _reverse_endianness(hash, hsize); EG(ret, err); 651*f0865ec9SKyle Evans ret = nn_init_from_buf(&h, hash, hsize); EG(ret, err); 652*f0865ec9SKyle Evans ret = nn_mod(&h, &h, q); EG(ret, err); 653*f0865ec9SKyle Evans /* Import s0_bar */ 654*f0865ec9SKyle Evans ret = local_memcpy(FE2OS_W, &sig[0], l); EG(ret, err); 655*f0865ec9SKyle Evans ret = _reverse_endianness(FE2OS_W, l); EG(ret, err); 656*f0865ec9SKyle Evans ret = nn_init_from_buf(&s1, FE2OS_W, l); EG(ret, err); 657*f0865ec9SKyle Evans ret = nn_mod(&s1, &s1, q); EG(ret, err); 658*f0865ec9SKyle Evans /* Compute (s0_bar + 2**l) * d */ 659*f0865ec9SKyle Evans ret = nn_init(&tmp, 0); EG(ret, err); 660*f0865ec9SKyle Evans ret = nn_one(&tmp); EG(ret, err); 661*f0865ec9SKyle Evans ret = nn_lshift(&tmp, &tmp, (bitcnt_t)(8*l)); EG(ret, err); 662*f0865ec9SKyle Evans ret = nn_mod(&tmp, &tmp, q); EG(ret, err); 663*f0865ec9SKyle Evans ret = nn_mod_add(&s1, &s1, &tmp, q); EG(ret, err); 664*f0865ec9SKyle Evans #ifdef USE_SIG_BLINDING 665*f0865ec9SKyle Evans /* Blind s1 with b */ 666*f0865ec9SKyle Evans ret = nn_mod_mul(&s1, &s1, &b, q); EG(ret, err); 667*f0865ec9SKyle Evans 668*f0865ec9SKyle Evans /* Blind the message hash */ 669*f0865ec9SKyle Evans ret = nn_mod_mul(&h, &h, &b, q); EG(ret, err); 670*f0865ec9SKyle Evans 671*f0865ec9SKyle Evans /* Blind the nonce */ 672*f0865ec9SKyle Evans ret = nn_mod_mul(&k, &k, &b, q); EG(ret, err); 673*f0865ec9SKyle Evans #endif /* USE_SIG_BLINDING */ 674*f0865ec9SKyle Evans 675*f0865ec9SKyle Evans ret = nn_mod_mul(&s1, &s1, &(priv_key->x), q); EG(ret, err); 676*f0865ec9SKyle Evans ret = nn_mod_sub(&s1, &k, &s1, q); EG(ret, err); 677*f0865ec9SKyle Evans ret = nn_mod_sub(&s1, &s1, &h, q); EG(ret, err); 678*f0865ec9SKyle Evans 679*f0865ec9SKyle Evans #ifdef USE_SIG_BLINDING 680*f0865ec9SKyle Evans 
/* Unblind s1 */ 681*f0865ec9SKyle Evans ret = nn_mod_mul(&s1, &s1, &binv, q); EG(ret, err); 682*f0865ec9SKyle Evans #endif 683*f0865ec9SKyle Evans dbg_nn_print("s1", &s1); 684*f0865ec9SKyle Evans 685*f0865ec9SKyle Evans /* Clean hash buffer as we do not need it anymore */ 686*f0865ec9SKyle Evans ret = local_memset(hash, 0, hsize); EG(ret, err); 687*f0865ec9SKyle Evans 688*f0865ec9SKyle Evans /* Now export s1 and reverse its endianness */ 689*f0865ec9SKyle Evans ret = nn_export_to_buf(&sig[l], (u16)BIGN_S1_LEN(q_bit_len), &s1); EG(ret, err); 690*f0865ec9SKyle Evans ret = _reverse_endianness(&sig[l], (u16)BIGN_S1_LEN(q_bit_len)); 691*f0865ec9SKyle Evans 692*f0865ec9SKyle Evans err: 693*f0865ec9SKyle Evans nn_uninit(&k); 694*f0865ec9SKyle Evans nn_uninit(&h); 695*f0865ec9SKyle Evans nn_uninit(&tmp); 696*f0865ec9SKyle Evans nn_uninit(&s1); 697*f0865ec9SKyle Evans prj_pt_uninit(&kG); 698*f0865ec9SKyle Evans #ifdef USE_SIG_BLINDING 699*f0865ec9SKyle Evans nn_uninit(&b); 700*f0865ec9SKyle Evans nn_uninit(&binv); 701*f0865ec9SKyle Evans #endif 702*f0865ec9SKyle Evans 703*f0865ec9SKyle Evans /* 704*f0865ec9SKyle Evans * We can now clear data part of the context. This will clear 705*f0865ec9SKyle Evans * magic and avoid further reuse of the whole context. 
706*f0865ec9SKyle Evans */ 707*f0865ec9SKyle Evans if(ctx != NULL){ 708*f0865ec9SKyle Evans IGNORE_RET_VAL(local_memset(&(ctx->sign_data.bign), 0, sizeof(bign_sign_data))); 709*f0865ec9SKyle Evans } 710*f0865ec9SKyle Evans 711*f0865ec9SKyle Evans /* Clean what remains on the stack */ 712*f0865ec9SKyle Evans PTR_NULLIFY(priv_key); 713*f0865ec9SKyle Evans PTR_NULLIFY(G); 714*f0865ec9SKyle Evans PTR_NULLIFY(q); 715*f0865ec9SKyle Evans PTR_NULLIFY(x); 716*f0865ec9SKyle Evans PTR_NULLIFY(oid_ptr); 717*f0865ec9SKyle Evans VAR_ZEROIFY(q_bit_len); 718*f0865ec9SKyle Evans VAR_ZEROIFY(hsize); 719*f0865ec9SKyle Evans VAR_ZEROIFY(oid_len); 720*f0865ec9SKyle Evans 721*f0865ec9SKyle Evans return ret; 722*f0865ec9SKyle Evans } 723*f0865ec9SKyle Evans 724*f0865ec9SKyle Evans /* 725*f0865ec9SKyle Evans * Generic *internal* BIGN verification functions (init, update and finalize). 726*f0865ec9SKyle Evans * Their purpose is to allow passing a specific hash function (along with 727*f0865ec9SKyle Evans * its output size) and the random ephemeral key k, so that compliance 728*f0865ec9SKyle Evans * tests against test vectors can be made without ugly hack in the code 729*f0865ec9SKyle Evans * itself. 730*f0865ec9SKyle Evans * 731*f0865ec9SKyle Evans * Implementation notes: 732*f0865ec9SKyle Evans * 733*f0865ec9SKyle Evans * a) The BIGN algorithm makes use of the OID of the external hash function. 734*f0865ec9SKyle Evans * We let the upper layer provide us with this in the "adata" field of the 735*f0865ec9SKyle Evans * context. 
 */

#define BIGN_VERIFY_MAGIC ((word_t)(0xceff8344927346abULL))
#define BIGN_VERIFY_CHECK_INITIALIZED(A, ret, err) \
	MUST_HAVE((((void *)(A)) != NULL) && ((A)->magic == BIGN_VERIFY_MAGIC), ret, err)

/*
 * Initialize a BIGN/DBIGN verification context from a raw signature.
 *
 * The signature buffer 'sig' of length 'siglen' is the concatenation
 * s0 || s1 in little endian form, with s0 of l = BIGN_S0_LEN(q_bit_len)
 * bytes and s1 of BIGN_S1_LEN(q_bit_len) bytes. Both halves are imported
 * as numbers in the context; the raw s0 bytes are additionally kept
 * verbatim (s0_sig) for the byte-wise comparison performed in
 * __bign_verify_finalize(). The signature is rejected upfront when
 * s1 >= q.
 *
 * Returns 0 on success, a negative value on error.
 */
int __bign_verify_init(struct ec_verify_context *ctx, const u8 *sig, u8 siglen,
		       ec_alg_type key_type)
{
	bitcnt_t q_bit_len;
	nn_src_t q;
	nn *s0, *s1;
	u8 *s0_sig;
	u8 TMP[BYTECEIL(CURVES_MAX_Q_BIT_LEN)];
	u8 l;
	int ret, cmp;

	/* First, verify context has been initialized */
	ret = sig_verify_check_initialized(ctx); EG(ret, err);

	ret = local_memset(TMP, 0, sizeof(TMP)); EG(ret, err);

	/* Do some sanity checks on input params */
	ret = pub_key_check_initialized_and_type(ctx->pub_key, key_type); EG(ret, err);
	MUST_HAVE((ctx->h != NULL) && (ctx->h->digest_size <= MAX_DIGEST_SIZE) &&
		  (ctx->h->block_size <= MAX_BLOCK_SIZE), ret, err);
	MUST_HAVE((sig != NULL), ret, err);

	/* We check that our additional data is not NULL as it must contain
	 * the mandatory external hash OID.
	 */
	MUST_HAVE((ctx->adata != NULL) && (ctx->adata_len != 0), ret, err);

	/* Make things more readable */
	q = &(ctx->pub_key->params->ec_gen_order);
	q_bit_len = ctx->pub_key->params->ec_gen_order_bitlen;
	s0 = &(ctx->verify_data.bign.s0);
	s1 = &(ctx->verify_data.bign.s1);
	s0_sig = (u8*)(&(ctx->verify_data.bign.s0_sig));

	/* Compute l (the byte size of s0) depending on the order */
	l = (u8)BIGN_S0_LEN(q_bit_len);

	/* Check given signature length is the expected one */
	MUST_HAVE((siglen == BIGN_SIGLEN(q_bit_len)), ret, err);

	/* Copy the raw s0 bytes, to be compared against the recomputed
	 * value at finalize time. */
	ret = local_memcpy(s0_sig, sig, l); EG(ret, err);

	/* Import s0 and s1 values from the signature buffer. The wire
	 * format is little endian, so reverse endianness before the
	 * big endian nn import. */
	ret = local_memcpy(&TMP[0], sig, l); EG(ret, err);
	ret = _reverse_endianness(&TMP[0], l); EG(ret, err);
	ret = nn_init_from_buf(s0, &TMP[0], l); EG(ret, err);
	/**/
	ret = local_memcpy(&TMP[0], &sig[l], (u32)BIGN_S1_LEN(q_bit_len)); EG(ret, err);
	ret = _reverse_endianness(&TMP[0], (u16)BIGN_S1_LEN(q_bit_len)); EG(ret, err);
	ret = nn_init_from_buf(s1, &TMP[0], (u8)BIGN_S1_LEN(q_bit_len)); EG(ret, err);
	dbg_nn_print("s0", s0);
	dbg_nn_print("s1", s1);

	/* 1. Reject the signature if s1 >= q */
	ret = nn_cmp(s1, q, &cmp); EG(ret, err);
	MUST_HAVE((cmp < 0), ret, err);

	/* Initialize the remaining part of the verify context: the
	 * streaming hash of the message. */
	/* Since we call a callback, sanity check our mapping */
	ret = hash_mapping_callbacks_sanity_check(ctx->h); EG(ret, err);
	ret = ctx->h->hfunc_init(&(ctx->verify_data.bign.h_ctx)); EG(ret, err);

	ctx->verify_data.bign.magic = BIGN_VERIFY_MAGIC;

err:
	VAR_ZEROIFY(q_bit_len);
	PTR_NULLIFY(q);
	PTR_NULLIFY(s0);
	PTR_NULLIFY(s1);
	PTR_NULLIFY(s0_sig);

	return ret;
}

/*
 * Feed a message chunk into the verification hash context.
 *
 * May be called any number of times between __bign_verify_init() and
 * __bign_verify_finalize(). Returns 0 on success, a negative value on
 * error.
 */
int __bign_verify_update(struct ec_verify_context *ctx,
			 const u8 *chunk, u32 chunklen, ec_alg_type key_type)
{
	int ret;

	/*
	 * First, verify context has been initialized and public
	 * part too. This guarantees the context is a BIGN
	 * verification one and we do not update() or finalize()
	 * before init().
	 */
	ret = sig_verify_check_initialized(ctx); EG(ret, err);
	BIGN_VERIFY_CHECK_INITIALIZED(&(ctx->verify_data.bign), ret, err);
	/* Do some sanity checks on input params */
	ret = pub_key_check_initialized_and_type(ctx->pub_key, key_type); EG(ret, err);

	/* 2. Compute h = H(m) (streaming update of the external hash) */
	/* Since we call a callback, sanity check our mapping */
	ret = hash_mapping_callbacks_sanity_check(ctx->h); EG(ret, err);
	ret = ctx->h->hfunc_update(&(ctx->verify_data.bign.h_ctx), chunk, chunklen);

err:
	return ret;
}

/*
 * Finalize a BIGN/DBIGN verification.
 *
 * Recomputes the point W = ((s1_bar + h_bar) mod q) * G +
 * ((s0_bar + 2**l) mod q) * Y, rejects the point at infinity, then
 * recomputes t = <BELT-HASH(OID(H) || FE2OS(W_x) || FE2OS(W_y) ||
 * H(m))>l and accepts the signature if and only if t equals the s0
 * bytes saved at init time. Step numbers in the comments follow the
 * STB 34.101.45 verification algorithm.
 *
 * On exit (success or failure) the bign verify data of the context is
 * scrubbed, which clears the magic and prevents reuse of the context.
 *
 * Returns 0 when the signature is valid, a negative value otherwise.
 */
int __bign_verify_finalize(struct ec_verify_context *ctx,
			   ec_alg_type key_type)
{
	prj_pt uG, vY;
	prj_pt_src_t G, Y;
	prj_pt_t W;
	u8 hash[MAX_DIGEST_SIZE];
	u8 hash_belt[BELT_HASH_DIGEST_SIZE];
	u8 t[BIGN_S0_LEN(CURVES_MAX_Q_BIT_LEN)];
	u8 FE2OS_W[LOCAL_MAX(2 * BYTECEIL(CURVES_MAX_P_BIT_LEN), 2 * BIGN_S0_LEN(CURVES_MAX_Q_BIT_LEN))];
	bitcnt_t p_bit_len, q_bit_len;
	nn_src_t q;
	nn h, tmp;
	nn *s0, *s1;
	u8 *s0_sig;
	u8 hsize, p_len, l;
	belt_hash_context belt_hash_ctx;
	int ret, iszero, cmp;
	const u8 *oid_ptr = NULL;
	u16 oid_len = 0;

	h.magic = tmp.magic = WORD(0);
	uG.magic = vY.magic = WORD(0);

	/* NOTE: we reuse uG for W to optimize local variables */
	W = &uG;

	/*
	 * First, verify context has been initialized and public
	 * part too. This guarantees the context is a BIGN
	 * verification one and we do not finalize() before init().
	 */
	ret = sig_verify_check_initialized(ctx); EG(ret, err);
	BIGN_VERIFY_CHECK_INITIALIZED(&(ctx->verify_data.bign), ret, err);
	/* Do some sanity checks on input params */
	ret = pub_key_check_initialized_and_type(ctx->pub_key, key_type); EG(ret, err);

	/* We check that our additional data is not NULL as it must contain
	 * the mandatory external hash OID.
	 */
	MUST_HAVE((ctx->adata != NULL) && (ctx->adata_len != 0), ret, err);

	/* Zero init points */
	ret = local_memset(&uG, 0, sizeof(prj_pt)); EG(ret, err);
	ret = local_memset(&vY, 0, sizeof(prj_pt)); EG(ret, err);

	/* Make things more readable */
	G = &(ctx->pub_key->params->ec_gen);
	Y = &(ctx->pub_key->y);
	q = &(ctx->pub_key->params->ec_gen_order);
	p_bit_len = ctx->pub_key->params->ec_fp.p_bitlen;
	q_bit_len = ctx->pub_key->params->ec_gen_order_bitlen;
	p_len = (u8)BYTECEIL(p_bit_len);
	hsize = ctx->h->digest_size;
	s0 = &(ctx->verify_data.bign.s0);
	s1 = &(ctx->verify_data.bign.s1);
	s0_sig = (u8*)(&(ctx->verify_data.bign.s0_sig));

	/* Sanity check: our local t buffer must match the stored s0 bytes */
	MUST_HAVE((sizeof(t) == sizeof(ctx->verify_data.bign.s0_sig)), ret, err);

	/* Compute our l that is inherited from q size */
	l = (u8)BIGN_S0_LEN(q_bit_len);

	/* 2. Compute h = H(m) */
	/* Since we call a callback, sanity check our mapping */
	ret = hash_mapping_callbacks_sanity_check(ctx->h); EG(ret, err);
	ret = ctx->h->hfunc_finalize(&(ctx->verify_data.bign.h_ctx), hash); EG(ret, err);
	dbg_buf_print("h = H(m)", hash, hsize);

	/* Import H as a number modulo q (the wire hash is little endian,
	 * hence the endianness swap before the big endian nn import). */
	ret = _reverse_endianness(hash, hsize); EG(ret, err);
	ret = nn_init_from_buf(&h, hash, hsize); EG(ret, err);
	ret = nn_mod(&h, &h, q); EG(ret, err);
	/* NOTE: we reverse endianness again of the hash since we will
	 * have to use the original value (when hashing it below).
	 */
	ret = _reverse_endianness(hash, hsize); EG(ret, err);

	/* Compute ((s1_bar + h_bar) mod q) */
	ret = nn_mod_add(&h, &h, s1, q); EG(ret, err);
	/* Compute (s0_bar + 2**l) mod q, with 2**l built by shifting 1
	 * left by 8*l bits. */
	ret = nn_init(&tmp, 0); EG(ret, err);
	ret = nn_one(&tmp); EG(ret, err);
	ret = nn_lshift(&tmp, &tmp, (bitcnt_t)(8*l)); EG(ret, err);
	ret = nn_mod(&tmp, &tmp, q); EG(ret, err);
	ret = nn_mod_add(&tmp, &tmp, s0, q); EG(ret, err);

	/* 3. Compute ((s1_bar + h_bar) mod q) * G + ((s0_bar + 2**l) mod q) * Y. */
	ret = prj_pt_mul(&uG, &h, G); EG(ret, err);
	ret = prj_pt_mul(&vY, &tmp, Y); EG(ret, err);
	ret = prj_pt_add(W, &uG, &vY); EG(ret, err);
	/* 5. If the result is point at infinity, return false.
	 */
	ret = prj_pt_iszero(W, &iszero); EG(ret, err);
	MUST_HAVE((!iszero), ret, err);
	ret = prj_pt_unique(W, W); EG(ret, err);

	/* 6. Compute t = <BELT-HASH(OID(H) || <<FE2OS(W_x)> || <FE2OS(W_y)>>2*l || H(X))>l */
	ret = belt_hash_init(&belt_hash_ctx); EG(ret, err);
	ret = bign_get_oid_from_adata(ctx->adata, ctx->adata_len, &oid_ptr, &oid_len); EG(ret, err);
	ret = belt_hash_update(&belt_hash_ctx, oid_ptr, oid_len); EG(ret, err);
	/**/
	ret = local_memset(FE2OS_W, 0, sizeof(FE2OS_W)); EG(ret, err);
	ret = fp_export_to_buf(&FE2OS_W[0], p_len, &(W->X)); EG(ret, err);
	ret = _reverse_endianness(&FE2OS_W[0], p_len); EG(ret, err);
	ret = fp_export_to_buf(&FE2OS_W[p_len], p_len, &(W->Y)); EG(ret, err);
	ret = _reverse_endianness(&FE2OS_W[p_len], p_len); EG(ret, err);
	/* Only hash the 2*l bytes of FE2OS(W_x) || FE2OS(W_y) */
	ret = belt_hash_update(&belt_hash_ctx, &FE2OS_W[0], (u32)(2*l)); EG(ret, err);
	/**/
	ret = belt_hash_update(&belt_hash_ctx, hash, hsize); EG(ret, err);
	/* Store our t (the l leftmost bytes of the BELT hash output) */
	ret = local_memset(hash_belt, 0, sizeof(hash_belt)); EG(ret, err);
	ret = belt_hash_final(&belt_hash_ctx, hash_belt); EG(ret, err);
	ret = local_memset(&t[0], 0, l); EG(ret, err);
	ret = local_memcpy(&t[0], &hash_belt[0], LOCAL_MIN(l, BELT_HASH_DIGEST_SIZE)); EG(ret, err);

	/* 10. Accept the signature if and only if t equals s0_sig'
	 * (are_equal() reports equality through a non-zero cmp, so ret
	 * is 0 — valid — exactly when the buffers match). */
	ret = are_equal(t, s0_sig, l, &cmp); EG(ret, err);
	ret = (cmp == 0) ? -1 : 0;

err:
	prj_pt_uninit(&uG);
	prj_pt_uninit(&vY);
	nn_uninit(&h);
	nn_uninit(&tmp);

	/*
	 * We can now clear data part of the context. This will clear
	 * magic and avoid further reuse of the whole context.
	 */
	if(ctx != NULL){
		IGNORE_RET_VAL(local_memset(&(ctx->verify_data.bign), 0, sizeof(bign_verify_data)));
	}

	/* Clean what remains on the stack */
	PTR_NULLIFY(G);
	PTR_NULLIFY(Y);
	PTR_NULLIFY(W);
	VAR_ZEROIFY(p_bit_len);
	VAR_ZEROIFY(q_bit_len);
	VAR_ZEROIFY(p_len);
	PTR_NULLIFY(q);
	PTR_NULLIFY(s0);
	PTR_NULLIFY(s1);
	PTR_NULLIFY(s0_sig);
	PTR_NULLIFY(oid_ptr);
	VAR_ZEROIFY(hsize);
	VAR_ZEROIFY(oid_len);

	return ret;
}

#else /* defined(WITH_SIG_BIGN) || defined(WITH_SIG_DBIGN) */

/*
 * Dummy definition to avoid the empty translation unit ISO C warning
 */
typedef int dummy;
#endif /* WITH_SIG_BIGN */