/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2006 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident "%Z%%M% %I% %E% SMI"

#include <sys/modctl.h>
#include <sys/cmn_err.h>
#include <sys/crypto/common.h>
#include <sys/crypto/spi.h>
#include <sys/strsun.h>
#include <sys/systm.h>
#include <sys/sysmacros.h>
#define _SHA2_IMPL
#include <sys/sha2.h>

/*
 * The sha2 module is created with two modlinkages:
 * - a modlmisc that allows consumers to directly call the entry points
 *   SHA2Init, SHA2Update, and SHA2Final.
 * - a modlcrypto that allows the module to register with the Kernel
 *   Cryptographic Framework (KCF) as a software provider for the SHA2
 *   mechanisms.
 */

static struct modlmisc modlmisc = {
    &mod_miscops,
    "SHA2 Message-Digest Algorithm"
};

static struct modlcrypto modlcrypto = {
    &mod_cryptoops,
    "SHA2 Kernel SW Provider %I%"
};

static struct modlinkage modlinkage = {
    MODREV_1, &modlmisc, &modlcrypto, NULL
};

/*
 * CSPI information (entry points, provider info, etc.)
 */

/*
 * Context for SHA2 mechanism.
 */
typedef struct sha2_ctx {
    sha2_mech_type_t sc_mech_type;  /* type of context */
    SHA2_CTX         sc_sha2_ctx;   /* SHA2 context */
} sha2_ctx_t;

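/*
 * Illustrative sketch only (not part of this module): the SHA2_CTX
 * embedded above is the same context type used by the raw misc/sha2
 * entry points mentioned at the top of this file, so an out-of-framework
 * kernel consumer can compute a digest directly, roughly as follows
 * (data, datalen and digest are hypothetical caller-supplied names; the
 * SHA256 selector is the one defined in <sys/sha2.h>):
 *
 *	SHA2_CTX sha2_ctx;
 *	uint8_t digest[SHA256_DIGEST_LENGTH];
 *
 *	SHA2Init(SHA256, &sha2_ctx);
 *	SHA2Update(&sha2_ctx, data, datalen);
 *	SHA2Final(digest, &sha2_ctx);
 *
 * The KCF entry points below wrap this same Init/Update/Final sequence
 * for callers going through the framework.
 */
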
/*
 * Context for SHA2 HMAC and HMAC GENERAL mechanisms.
 */
typedef struct sha2_hmac_ctx {
    sha2_mech_type_t hc_mech_type;   /* type of context */
    uint32_t         hc_digest_len;  /* digest len in bytes */
    SHA2_CTX         hc_icontext;    /* inner SHA2 context */
    SHA2_CTX         hc_ocontext;    /* outer SHA2 context */
} sha2_hmac_ctx_t;

/*
 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
 * by KCF to one of the entry points.
 */

#define PROV_SHA2_CTX(ctx)      ((sha2_ctx_t *)(ctx)->cc_provider_private)
#define PROV_SHA2_HMAC_CTX(ctx) ((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)

/* to extract the digest length passed as mechanism parameter */
#define PROV_SHA2_GET_DIGEST_LEN(m, len) { \
    if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t))) \
        (len) = (uint32_t)*((ulong_t *)(m)->cm_param); \
    else { \
        ulong_t tmp_ulong; \
        bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t)); \
        (len) = (uint32_t)tmp_ulong; \
    } \
}

#define PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) { \
    SHA2Init(mech, ctx); \
    SHA2Update(ctx, key, len); \
    SHA2Final(digest, ctx); \
}

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t sha2_mech_info_tab[] = {
    /* SHA256 */
    {SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
        CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
        0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA256-HMAC */
    {SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
        CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
        SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
        CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA256-HMAC GENERAL */
    {SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
        CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
        SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
        CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA384 */
    {SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
        CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
        0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA384-HMAC */
    {SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
        CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
        SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
        CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA384-HMAC GENERAL */
    {SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
        CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
        SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
        CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA512 */
    {SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
        CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
        0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA512-HMAC */
    {SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
        CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
        SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
        CRYPTO_KEYSIZE_UNIT_IN_BITS},
    /* SHA512-HMAC GENERAL */
    {SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
        CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
        SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
        CRYPTO_KEYSIZE_UNIT_IN_BITS}
};

static void sha2_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t sha2_control_ops = {
    sha2_provider_status
};

static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_req_handle_t);
static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);

static crypto_digest_ops_t sha2_digest_ops = {
    sha2_digest_init,
    sha2_digest,
    sha2_digest_update,
    NULL,
    sha2_digest_final,
    sha2_digest_atomic
};

static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_mac_ops_t sha2_mac_ops = {
    sha2_mac_init,
    NULL,
    sha2_mac_update,
    sha2_mac_final,
    sha2_mac_atomic,
    sha2_mac_verify_atomic
};

static int sha2_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int sha2_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t sha2_ctx_ops = {
    sha2_create_ctx_template,
    sha2_free_context
};

static crypto_ops_t sha2_crypto_ops = {
    &sha2_control_ops,
    &sha2_digest_ops,
    NULL,
    &sha2_mac_ops,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    &sha2_ctx_ops
};

static crypto_provider_info_t sha2_prov_info = {
    CRYPTO_SPI_VERSION_1,
    "SHA2 Software Provider",
    CRYPTO_SW_PROVIDER,
    {&modlinkage},
    NULL,
    &sha2_crypto_ops,
    sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
    sha2_mech_info_tab
};

static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;

int
_init()
{
    int ret;

    if ((ret = mod_install(&modlinkage)) != 0)
        return (ret);

    /*
     * Register with KCF. If the registration fails, log an
     * error but do not uninstall the module, since the functionality
     * provided by misc/sha2 should still be available.
     */
    if ((ret = crypto_register_provider(&sha2_prov_info,
        &sha2_prov_handle)) != CRYPTO_SUCCESS)
        cmn_err(CE_WARN, "sha2 _init: "
            "crypto_register_provider() failed (0x%x)", ret);

    return (0);
}

int
_info(struct modinfo *modinfop)
{
    return (mod_info(&modlinkage, modinfop));
}

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
    *status = CRYPTO_PROVIDER_READY;
}

/*
 * KCF software provider digest entry points.
 */

static int
sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_req_handle_t req)
{

    /*
     * Allocate and initialize SHA2 context.
     */
    ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
        crypto_kmflag(req));
    if (ctx->cc_provider_private == NULL)
        return (CRYPTO_HOST_MEMORY);

    PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
    SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);

    return (CRYPTO_SUCCESS);
}

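/*
 * Illustrative sketch only: a kernel KCF consumer normally reaches the
 * digest entry points in this file through the framework rather than by
 * calling them directly, roughly as follows (buf and out are hypothetical
 * caller buffers; see crypto_mech2id(9F) and crypto_digest(9F)):
 *
 *	crypto_mechanism_t mech;
 *	crypto_data_t in, out_cd;
 *
 *	mech.cm_type = crypto_mech2id(SUN_CKM_SHA256);
 *	mech.cm_param = NULL;
 *	mech.cm_param_len = 0;
 *	... set up "in" and "out_cd" as CRYPTO_DATA_RAW over buf/out ...
 *	(void) crypto_digest(&mech, &in, &out_cd, NULL);
 *
 * For a single-part request like this, KCF typically dispatches to
 * sha2_digest_atomic(); multi-part requests use the init/update/final
 * entry points below.
 */
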
/*
 * Helper SHA2 digest update function for uio data.
 */
static int
sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
{
    off_t offset = data->cd_offset;
    size_t length = data->cd_length;
    uint_t vec_idx;
    size_t cur_len;

    /* we support only kernel buffer */
    if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
        return (CRYPTO_ARGUMENTS_BAD);

    /*
     * Jump to the first iovec containing data to be
     * digested.
     */
    for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
        offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
        offset -= data->cd_uio->uio_iov[vec_idx++].iov_len);
    if (vec_idx == data->cd_uio->uio_iovcnt) {
        /*
         * The caller specified an offset that is larger than the
         * total size of the buffers it provided.
         */
        return (CRYPTO_DATA_LEN_RANGE);
    }

    /*
     * Now do the digesting on the iovecs.
     */
    while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
        cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
            offset, length);

        SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
            uio_iov[vec_idx].iov_base + offset, cur_len);
        length -= cur_len;
        vec_idx++;
        offset = 0;
    }

    if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
        /*
         * The end of the specified iovec's was reached but
         * the length requested could not be processed, i.e.
         * The caller requested to digest more data than it provided.
         */
        return (CRYPTO_DATA_LEN_RANGE);
    }

    return (CRYPTO_SUCCESS);
}

/*
 * Helper SHA2 digest final function for uio data.
 * digest_len is the length of the desired digest. If digest_len
 * is smaller than the default SHA2 digest length, the caller
 * must pass a scratch buffer, digest_scratch, which must
 * be at least the algorithm's digest length bytes.
 */
static int
sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
    ulong_t digest_len, uchar_t *digest_scratch)
{
    off_t offset = digest->cd_offset;
    uint_t vec_idx;

    /* we support only kernel buffer */
    if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
        return (CRYPTO_ARGUMENTS_BAD);

    /*
     * Jump to the first iovec containing ptr to the digest to
     * be returned.
     */
    for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
        offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
        offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len);
    if (vec_idx == digest->cd_uio->uio_iovcnt) {
        /*
         * The caller specified an offset that is
         * larger than the total size of the buffers
         * it provided.
         */
        return (CRYPTO_DATA_LEN_RANGE);
    }

    if (offset + digest_len <=
        digest->cd_uio->uio_iov[vec_idx].iov_len) {
        /*
         * The computed SHA2 digest will fit in the current
         * iovec.
         */
        if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
            (digest_len != SHA256_DIGEST_LENGTH)) ||
            ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
            (digest_len != SHA512_DIGEST_LENGTH))) {
            /*
             * The caller requested a short digest. Digest
             * into a scratch buffer and return to
             * the user only what was requested.
             */
            SHA2Final(digest_scratch, sha2_ctx);

            bcopy(digest_scratch, (uchar_t *)digest->
                cd_uio->uio_iov[vec_idx].iov_base + offset,
                digest_len);
        } else {
            SHA2Final((uchar_t *)digest->
                cd_uio->uio_iov[vec_idx].iov_base + offset,
                sha2_ctx);
        }
    } else {
        /*
         * The computed digest will be crossing one or more iovec's.
         * This is bad performance-wise but we need to support it.
         * Allocate a small scratch buffer on the stack and
         * copy it piecemeal to the specified digest iovec's.
         */
        uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
        off_t scratch_offset = 0;
        size_t length = digest_len;
        size_t cur_len;

        SHA2Final(digest_tmp, sha2_ctx);

        while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
            cur_len =
                MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
                offset, length);
            bcopy(digest_tmp + scratch_offset,
                digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
                cur_len);

            length -= cur_len;
            vec_idx++;
            scratch_offset += cur_len;
            offset = 0;
        }

        if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
            /*
             * The end of the specified iovec's was reached but
             * the length requested could not be processed, i.e.
             * The caller requested to digest more data than it
             * provided.
             */
            return (CRYPTO_DATA_LEN_RANGE);
        }
    }

    return (CRYPTO_SUCCESS);
}

/*
 * Helper SHA2 digest update for mblk's.
 */
static int
sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
{
    off_t offset = data->cd_offset;
    size_t length = data->cd_length;
    mblk_t *mp;
    size_t cur_len;

    /*
     * Jump to the first mblk_t containing data to be digested.
     */
    for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
        offset -= MBLKL(mp), mp = mp->b_cont);
    if (mp == NULL) {
        /*
         * The caller specified an offset that is larger than the
         * total size of the buffers it provided.
         */
        return (CRYPTO_DATA_LEN_RANGE);
    }

    /*
     * Now do the digesting on the mblk chain.
     */
    while (mp != NULL && length > 0) {
        cur_len = MIN(MBLKL(mp) - offset, length);
        SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
        length -= cur_len;
        offset = 0;
        mp = mp->b_cont;
    }

    if (mp == NULL && length > 0) {
        /*
         * The end of the mblk was reached but the length requested
         * could not be processed, i.e. The caller requested
         * to digest more data than it provided.
         */
        return (CRYPTO_DATA_LEN_RANGE);
    }

    return (CRYPTO_SUCCESS);
}

/*
 * Helper SHA2 digest final for mblk's.
 * digest_len is the length of the desired digest. If digest_len
 * is smaller than the default SHA2 digest length, the caller
 * must pass a scratch buffer, digest_scratch, which must
 * be at least the algorithm's digest length bytes.
 */
static int
sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
    ulong_t digest_len, uchar_t *digest_scratch)
{
    off_t offset = digest->cd_offset;
    mblk_t *mp;

    /*
     * Jump to the first mblk_t that will be used to store the digest.
     */
    for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
        offset -= MBLKL(mp), mp = mp->b_cont);
    if (mp == NULL) {
        /*
         * The caller specified an offset that is larger than the
         * total size of the buffers it provided.
         */
        return (CRYPTO_DATA_LEN_RANGE);
    }

    if (offset + digest_len <= MBLKL(mp)) {
        /*
         * The computed SHA2 digest will fit in the current mblk.
         * Do the SHA2Final() in-place.
         */
        if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
            (digest_len != SHA256_DIGEST_LENGTH)) ||
            ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
            (digest_len != SHA512_DIGEST_LENGTH))) {
            /*
             * The caller requested a short digest. Digest
             * into a scratch buffer and return to
             * the user only what was requested.
             */
            SHA2Final(digest_scratch, sha2_ctx);
            bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
        } else {
            SHA2Final(mp->b_rptr + offset, sha2_ctx);
        }
    } else {
        /*
         * The computed digest will be crossing one or more mblk's.
         * This is bad performance-wise but we need to support it.
         * Allocate a small scratch buffer on the stack and
         * copy it piecemeal to the specified digest mblk's.
         */
        uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
        off_t scratch_offset = 0;
        size_t length = digest_len;
        size_t cur_len;

        SHA2Final(digest_tmp, sha2_ctx);

        while (mp != NULL && length > 0) {
            cur_len = MIN(MBLKL(mp) - offset, length);
            bcopy(digest_tmp + scratch_offset,
                mp->b_rptr + offset, cur_len);

            length -= cur_len;
            mp = mp->b_cont;
            scratch_offset += cur_len;
            offset = 0;
        }

        if (mp == NULL && length > 0) {
            /*
             * The end of the specified mblk was reached but
             * the length requested could not be processed, i.e.
             * The caller requested to digest more data than it
             * provided.
             */
            return (CRYPTO_DATA_LEN_RANGE);
        }
    }

    return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;
    uint_t sha_digest_len;

    ASSERT(ctx->cc_provider_private != NULL);

    switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
    case SHA256_MECH_INFO_TYPE:
        sha_digest_len = SHA256_DIGEST_LENGTH;
        break;
    case SHA384_MECH_INFO_TYPE:
        sha_digest_len = SHA384_DIGEST_LENGTH;
        break;
    case SHA512_MECH_INFO_TYPE:
        sha_digest_len = SHA512_DIGEST_LENGTH;
        break;
    default:
        return (CRYPTO_MECHANISM_INVALID);
    }

    /*
     * We need to just return the length needed to store the output.
     * We should not destroy the context for the following cases.
     */
    if ((digest->cd_length == 0) ||
        (digest->cd_length < sha_digest_len)) {
        digest->cd_length = sha_digest_len;
        return (CRYPTO_BUFFER_TOO_SMALL);
    }

    /*
     * Do the SHA2 update on the specified input data.
     */
    switch (data->cd_format) {
    case CRYPTO_DATA_RAW:
        SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
            data->cd_length);
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            data);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            data);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    if (ret != CRYPTO_SUCCESS) {
        /* the update failed, free context and bail */
        kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
        ctx->cc_provider_private = NULL;
        digest->cd_length = 0;
        return (ret);
    }

    /*
     * Do a SHA2 final, must be done separately since the digest
     * type can be different than the input data type.
     */
    switch (digest->cd_format) {
    case CRYPTO_DATA_RAW:
        SHA2Final((unsigned char *)digest->cd_raw.iov_base +
            digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            digest, sha_digest_len, NULL);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            digest, sha_digest_len, NULL);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    /* all done, free context and return */

    if (ret == CRYPTO_SUCCESS)
        digest->cd_length = sha_digest_len;
    else
        digest->cd_length = 0;

    kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
    ctx->cc_provider_private = NULL;
    return (ret);
}

/* ARGSUSED */
static int
sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;

    ASSERT(ctx->cc_provider_private != NULL);

    /*
     * Do the SHA2 update on the specified input data.
     */
    switch (data->cd_format) {
    case CRYPTO_DATA_RAW:
        SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
            data->cd_length);
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            data);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            data);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    return (ret);
}

/* ARGSUSED */
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
    crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;
    uint_t sha_digest_len;

    ASSERT(ctx->cc_provider_private != NULL);

    switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
    case SHA256_MECH_INFO_TYPE:
        sha_digest_len = SHA256_DIGEST_LENGTH;
        break;
    case SHA384_MECH_INFO_TYPE:
        sha_digest_len = SHA384_DIGEST_LENGTH;
        break;
    case SHA512_MECH_INFO_TYPE:
        sha_digest_len = SHA512_DIGEST_LENGTH;
        break;
    default:
        return (CRYPTO_MECHANISM_INVALID);
    }

    /*
     * We need to just return the length needed to store the output.
     * We should not destroy the context for the following cases.
     */
    if ((digest->cd_length == 0) ||
        (digest->cd_length < sha_digest_len)) {
        digest->cd_length = sha_digest_len;
        return (CRYPTO_BUFFER_TOO_SMALL);
    }

    /*
     * Do a SHA2 final.
     */
    switch (digest->cd_format) {
    case CRYPTO_DATA_RAW:
        SHA2Final((unsigned char *)digest->cd_raw.iov_base +
            digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            digest, sha_digest_len, NULL);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
            digest, sha_digest_len, NULL);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    /* all done, free context and return */

    if (ret == CRYPTO_SUCCESS)
        digest->cd_length = sha_digest_len;
    else
        digest->cd_length = 0;

    kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
    ctx->cc_provider_private = NULL;

    return (ret);
}

/* ARGSUSED */
static int
sha2_digest_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;
    SHA2_CTX sha2_ctx;
    uint32_t sha_digest_len;

    /*
     * Do the SHA inits.
     */

    SHA2Init(mechanism->cm_type, &sha2_ctx);

    switch (data->cd_format) {
    case CRYPTO_DATA_RAW:
        SHA2Update(&sha2_ctx, (uint8_t *)data->
            cd_raw.iov_base + data->cd_offset, data->cd_length);
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_update_uio(&sha2_ctx, data);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_update_mblk(&sha2_ctx, data);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    /*
     * Do the SHA updates on the specified input data.
     */

    if (ret != CRYPTO_SUCCESS) {
        /* the update failed, bail */
        digest->cd_length = 0;
        return (ret);
    }

    if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
        sha_digest_len = SHA256_DIGEST_LENGTH;
    else
        sha_digest_len = SHA512_DIGEST_LENGTH;

    /*
     * Do a SHA2 final, must be done separately since the digest
     * type can be different than the input data type.
     */
    switch (digest->cd_format) {
    case CRYPTO_DATA_RAW:
        SHA2Final((unsigned char *)digest->cd_raw.iov_base +
            digest->cd_offset, &sha2_ctx);
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_final_uio(&sha2_ctx, digest,
            sha_digest_len, NULL);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_final_mblk(&sha2_ctx, digest,
            sha_digest_len, NULL);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    if (ret == CRYPTO_SUCCESS)
        digest->cd_length = sha_digest_len;
    else
        digest->cd_length = 0;

    return (ret);
}

/*
 * KCF software provider mac entry points.
 *
 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
 *
 * Init:
 * The initialization routine initializes what we denote
 * as the inner and outer contexts by doing
 * - for inner context: SHA2(key XOR ipad)
 * - for outer context: SHA2(key XOR opad)
 *
 * Update:
 * Each subsequent SHA2 HMAC update will result in an
 * update of the inner context with the specified data.
 *
 * Final:
 * The SHA2 HMAC final will do a SHA2 final operation on the
 * inner context, and the resulting digest will be used
 * as the data for an update on the outer context. Last
 * but not least, a SHA2 final on the outer context will
 * be performed to obtain the SHA2 HMAC digest to return
 * to the user.
 */

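/*
 * Illustrative sketch only: in terms of the misc/sha2 entry points, the
 * flow described above amounts to the following, where key_ipad and
 * key_opad stand for the key padded to the block size and XORed with
 * 0x36 and 0x5c respectively, and tmp is a scratch digest buffer (all
 * names here are hypothetical):
 *
 *	SHA2Init(mech, &icontext);
 *	SHA2Update(&icontext, key_ipad, block_size);	(init, inner)
 *	SHA2Init(mech, &ocontext);
 *	SHA2Update(&ocontext, key_opad, block_size);	(init, outer)
 *	SHA2Update(&icontext, text, text_len);		(update)
 *	SHA2Final(tmp, &icontext);			(final, step 1)
 *	SHA2Update(&ocontext, tmp, digest_len);		(final, step 2)
 *	SHA2Final(mac, &ocontext);			(final, step 3)
 *
 * sha2_mac_init_ctx() below implements the two init steps.
 */
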
/*
 * Initialize a SHA2-HMAC context.
 */
static void
sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
{
    uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
    uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
    int i, block_size, blocks_per_int64;

    /* Determine the block size */
    if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
        block_size = SHA256_HMAC_BLOCK_SIZE;
        blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
    } else {
        block_size = SHA512_HMAC_BLOCK_SIZE;
        blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
    }

    (void) bzero(ipad, block_size);
    (void) bzero(opad, block_size);
    (void) bcopy(keyval, ipad, length_in_bytes);
    (void) bcopy(keyval, opad, length_in_bytes);

    /* XOR key with ipad (0x36) and opad (0x5c) */
    for (i = 0; i < blocks_per_int64; i++) {
        ipad[i] ^= 0x3636363636363636;
        opad[i] ^= 0x5c5c5c5c5c5c5c5c;
    }

    /* perform SHA2 on ipad */
    SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
    SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);

    /* perform SHA2 on opad */
    SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
    SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);

}

/*
 * Initialize a SHA2-HMAC operation, computing the inner and outer
 * contexts from the key or reusing a pre-computed context template.
 */
static int
sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
    crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;
    uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
    uint_t sha_digest_len, sha_hmac_block_size;

    /*
     * Set the digest length and block size to values appropriate to the
     * mechanism
     */
    switch (mechanism->cm_type) {
    case SHA256_HMAC_MECH_INFO_TYPE:
    case SHA256_HMAC_GEN_MECH_INFO_TYPE:
        sha_digest_len = SHA256_DIGEST_LENGTH;
        sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
        break;
    case SHA384_HMAC_MECH_INFO_TYPE:
    case SHA384_HMAC_GEN_MECH_INFO_TYPE:
    case SHA512_HMAC_MECH_INFO_TYPE:
    case SHA512_HMAC_GEN_MECH_INFO_TYPE:
        sha_digest_len = SHA512_DIGEST_LENGTH;
        sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
        break;
    default:
        return (CRYPTO_MECHANISM_INVALID);
    }

    if (key->ck_format != CRYPTO_KEY_RAW)
        return (CRYPTO_ARGUMENTS_BAD);

    ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
        crypto_kmflag(req));
    if (ctx->cc_provider_private == NULL)
        return (CRYPTO_HOST_MEMORY);

    if (ctx_template != NULL) {
        /* reuse context template */
        bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
            sizeof (sha2_hmac_ctx_t));
    } else {
        /* no context template, compute context */
        if (keylen_in_bytes > sha_hmac_block_size) {
            uchar_t digested_key[SHA512_DIGEST_LENGTH];
            sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;

            /*
             * Hash the passed-in key to get a smaller key.
             * The inner context is used since it hasn't been
             * initialized yet.
             */
            PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
                &hmac_ctx->hc_icontext,
                key->ck_data, keylen_in_bytes, digested_key);
            sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
                digested_key, sha_digest_len);
        } else {
            sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
                key->ck_data, keylen_in_bytes);
        }
    }

    /*
     * Get the mechanism parameters, if applicable.
     */
    PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
    if (mechanism->cm_type % 3 == 2) {
        if (mechanism->cm_param == NULL ||
            mechanism->cm_param_len != sizeof (ulong_t)) {
            ret = CRYPTO_MECHANISM_PARAM_INVALID;
        } else {
            PROV_SHA2_GET_DIGEST_LEN(mechanism,
                PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
            if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
                sha_digest_len)
                ret = CRYPTO_MECHANISM_PARAM_INVALID;
        }
    }

    if (ret != CRYPTO_SUCCESS) {
        bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
        kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
        ctx->cc_provider_private = NULL;
    }

    return (ret);
}

/* ARGSUSED */
static int
sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;

    ASSERT(ctx->cc_provider_private != NULL);

    /*
     * Do a SHA2 update of the inner context using the specified
     * data.
     */
    switch (data->cd_format) {
    case CRYPTO_DATA_RAW:
        SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
            (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
            data->cd_length);
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_update_uio(
            &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_update_mblk(
            &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    return (ret);
}

/* ARGSUSED */
static int
sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;
    uchar_t digest[SHA512_DIGEST_LENGTH];
    uint32_t digest_len, sha_digest_len;

    ASSERT(ctx->cc_provider_private != NULL);

    /* Set the digest lengths to values appropriate to the mechanism */
    switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
    case SHA256_HMAC_MECH_INFO_TYPE:
        sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
        break;
    case SHA384_HMAC_MECH_INFO_TYPE:
    case SHA512_HMAC_MECH_INFO_TYPE:
        sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
        break;
    case SHA256_HMAC_GEN_MECH_INFO_TYPE:
        sha_digest_len = SHA256_DIGEST_LENGTH;
        digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
        break;
    case SHA384_HMAC_GEN_MECH_INFO_TYPE:
    case SHA512_HMAC_GEN_MECH_INFO_TYPE:
        sha_digest_len = SHA512_DIGEST_LENGTH;
        digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
        break;
    }

    /*
     * We need to just return the length needed to store the output.
     * We should not destroy the context for the following cases.
     */
    if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
        mac->cd_length = digest_len;
        return (CRYPTO_BUFFER_TOO_SMALL);
    }

    /*
     * Do a SHA2 final on the inner context.
     */
    SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);

    /*
     * Do a SHA2 update on the outer context, feeding the inner
     * digest as data.
     */
    SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
        sha_digest_len);

    /*
     * Do a SHA2 final on the outer context, storing the computed
     * digest in the user's buffer.
     */
    switch (mac->cd_format) {
    case CRYPTO_DATA_RAW:
        if (digest_len != sha_digest_len) {
            /*
             * The caller requested a short digest. Digest
             * into a scratch buffer and return to
             * the user only what was requested.
             */
            SHA2Final(digest,
                &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
            bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
                mac->cd_offset, digest_len);
        } else {
            SHA2Final((unsigned char *)mac->cd_raw.iov_base +
                mac->cd_offset,
                &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
        }
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_final_uio(
            &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
            digest_len, digest);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_final_mblk(
            &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
            digest_len, digest);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    if (ret == CRYPTO_SUCCESS)
        mac->cd_length = digest_len;
    else
        mac->cd_length = 0;

    bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
    kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
    ctx->cc_provider_private = NULL;

    return (ret);
}

#define SHA2_MAC_UPDATE(data, ctx, ret) { \
    switch (data->cd_format) { \
    case CRYPTO_DATA_RAW: \
        SHA2Update(&(ctx).hc_icontext, \
            (uint8_t *)data->cd_raw.iov_base + \
            data->cd_offset, data->cd_length); \
        break; \
    case CRYPTO_DATA_UIO: \
        ret = sha2_digest_update_uio(&(ctx).hc_icontext, data); \
        break; \
    case CRYPTO_DATA_MBLK: \
        ret = sha2_digest_update_mblk(&(ctx).hc_icontext, \
            data); \
        break; \
    default: \
        ret = CRYPTO_ARGUMENTS_BAD; \
    } \
}

/* ARGSUSED */
static int
sha2_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;
    uchar_t digest[SHA512_DIGEST_LENGTH];
    sha2_hmac_ctx_t sha2_hmac_ctx;
    uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
    uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

    /*
     * Set the digest length and block size to values appropriate to the
     * mechanism
     */
    switch (mechanism->cm_type) {
    case SHA256_HMAC_MECH_INFO_TYPE:
    case SHA256_HMAC_GEN_MECH_INFO_TYPE:
        sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
        sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
        break;
    case SHA384_HMAC_MECH_INFO_TYPE:
    case SHA384_HMAC_GEN_MECH_INFO_TYPE:
    case SHA512_HMAC_MECH_INFO_TYPE:
    case SHA512_HMAC_GEN_MECH_INFO_TYPE:
        sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
        sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
        break;
    default:
        return (CRYPTO_MECHANISM_INVALID);
    }

    /* Add support for key by attributes (RFE 4706552) */
    if (key->ck_format != CRYPTO_KEY_RAW)
        return (CRYPTO_ARGUMENTS_BAD);

    if (ctx_template != NULL) {
        /* reuse context template */
        bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
    } else {
        sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
        /* no context template, initialize context */
        if (keylen_in_bytes > sha_hmac_block_size) {
            /*
             * Hash the passed-in key to get a smaller key.
             * The inner context is used since it hasn't been
             * initialized yet.
             */
            PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
                &sha2_hmac_ctx.hc_icontext,
                key->ck_data, keylen_in_bytes, digest);
            sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
                sha_digest_len);
        } else {
            sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
                keylen_in_bytes);
        }
    }

    /* get the mechanism parameters, if applicable */
    if ((mechanism->cm_type % 3) == 2) {
        if (mechanism->cm_param == NULL ||
            mechanism->cm_param_len != sizeof (ulong_t)) {
            ret = CRYPTO_MECHANISM_PARAM_INVALID;
            goto bail;
        }
        PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
        if (digest_len > sha_digest_len) {
            ret = CRYPTO_MECHANISM_PARAM_INVALID;
            goto bail;
        }
    }

    /* do a SHA2 update of the inner context using the specified data */
    SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
    if (ret != CRYPTO_SUCCESS)
        /* the update failed, free context and bail */
        goto bail;

    /*
     * Do a SHA2 final on the inner context.
     */
    SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

    /*
     * Do an SHA2 update on the outer context, feeding the inner
     * digest as data.
     *
     * Make sure that SHA384 is handled specially: its inner digest
     * is only SHA384_DIGEST_LENGTH (48) bytes, so it cannot feed a
     * 64-byte inner hash to the outer context.
     */
    if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
        mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
        SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
            SHA384_DIGEST_LENGTH);
    else
        SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

    /*
     * Do a SHA2 final on the outer context, storing the computed
     * digest in the user's buffer.
     */
    switch (mac->cd_format) {
    case CRYPTO_DATA_RAW:
        if (digest_len != sha_digest_len) {
            /*
             * The caller requested a short digest. Digest
             * into a scratch buffer and return to
             * the user only what was requested.
             */
            SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
            bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
                mac->cd_offset, digest_len);
        } else {
            SHA2Final((unsigned char *)mac->cd_raw.iov_base +
                mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
        }
        break;
    case CRYPTO_DATA_UIO:
        ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
            digest_len, digest);
        break;
    case CRYPTO_DATA_MBLK:
        ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
            digest_len, digest);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    if (ret == CRYPTO_SUCCESS) {
        mac->cd_length = digest_len;
        return (CRYPTO_SUCCESS);
    }
bail:
    bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
    mac->cd_length = 0;
    return (ret);
}

/* ARGSUSED */
static int
sha2_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
{
    int ret = CRYPTO_SUCCESS;
    uchar_t digest[SHA512_DIGEST_LENGTH];
    sha2_hmac_ctx_t sha2_hmac_ctx;
    uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
    uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

    /*
     * Set the digest length and block size to values appropriate to the
     * mechanism
     */
    switch (mechanism->cm_type) {
    case SHA256_HMAC_MECH_INFO_TYPE:
    case SHA256_HMAC_GEN_MECH_INFO_TYPE:
        sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
        sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
        break;
    case SHA384_HMAC_MECH_INFO_TYPE:
/* ARGSUSED */
static int
sha2_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	if (ctx_template != NULL) {
		/* reuse context template */
		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	} else {
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/* get the mechanism parameters, if applicable */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	if (mac->cd_length != digest_len) {
		ret = CRYPTO_INVALID_MAC;
		goto bail;
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/* do a SHA2 final on the inner context */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.  As in the MAC generation path above, SHA-384
	 * is special cased: its inner digest is only SHA384_DIGEST_LENGTH
	 * (48) bytes long, so only that much may be fed to the outer
	 * context.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the local digest[] scratch buffer.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
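
	/*
	 * digest[] now holds the computed HMAC value.  Only the first
	 * digest_len bytes are compared against the caller-supplied MAC
	 * below; for the *_HMAC_GENERAL mechanisms this may be a
	 * truncated length supplied via the mechanism parameter.
	 */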

	/*
	 * Compare the computed digest against the expected digest passed
	 * as argument.
	 */

	switch (mac->cd_format) {

	case CRYPTO_DATA_RAW:
		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
		    mac->cd_offset, digest_len) != 0)
			ret = CRYPTO_INVALID_MAC;
		break;

	case CRYPTO_DATA_UIO: {
		off_t offset = mac->cd_offset;
		uint_t vec_idx;
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		/* we support only kernel buffers */
		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
			return (CRYPTO_ARGUMENTS_BAD);

		/* jump to the first iovec containing the expected digest */
		for (vec_idx = 0;
		    vec_idx < mac->cd_uio->uio_iovcnt &&
		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len);
		if (vec_idx == mac->cd_uio->uio_iovcnt) {
			/*
			 * The caller specified an offset that is
			 * larger than the total size of the buffers
			 * it provided.
			 */
			ret = CRYPTO_DATA_LEN_RANGE;
			break;
		}

		/* do the comparison of computed digest vs specified one */
		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
			    offset, length);

			if (bcmp(digest + scratch_offset,
			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
			    cur_len) != 0) {
				ret = CRYPTO_INVALID_MAC;
				break;
			}

			length -= cur_len;
			vec_idx++;
			scratch_offset += cur_len;
			offset = 0;
		}
		break;
	}
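
	/*
	 * Example of the offset walk used above and below: with buffers
	 * (iovecs or mblks) of 16, 16 and 32 bytes and cd_offset == 20,
	 * the skip loop stops at the second buffer with a residual
	 * offset of 4, and the comparison then proceeds chunk by chunk
	 * from that point until digest_len bytes have been checked.
	 */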

	case CRYPTO_DATA_MBLK: {
		off_t offset = mac->cd_offset;
		mblk_t *mp;
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		/* jump to the first mblk_t containing the expected digest */
		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
		    offset -= MBLKL(mp), mp = mp->b_cont);
		if (mp == NULL) {
			/*
			 * The caller specified an offset that is larger than
			 * the total size of the buffers it provided.
			 */
			ret = CRYPTO_DATA_LEN_RANGE;
			break;
		}

		while (mp != NULL && length > 0) {
			cur_len = MIN(MBLKL(mp) - offset, length);
			if (bcmp(digest + scratch_offset,
			    mp->b_rptr + offset, cur_len) != 0) {
				ret = CRYPTO_INVALID_MAC;
				break;
			}

			length -= cur_len;
			mp = mp->b_cont;
			scratch_offset += cur_len;
			offset = 0;
		}
		break;
	}

	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
bail:
	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}

/*
 * KCF software provider context management entry points.
 */

/* ARGSUSED */
static int
sha2_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
    crypto_req_handle_t req)
{
	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
	uint32_t sha_digest_len, sha_hmac_block_size;

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/* Add support for key by attributes (RFE 4706552) */
	if (key->ck_format != CRYPTO_KEY_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Allocate and initialize SHA2 context.
	 */
	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
	    crypto_kmflag(req));
	if (sha2_hmac_ctx_tmpl == NULL)
		return (CRYPTO_HOST_MEMORY);

	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
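
	/*
	 * Per RFC 2104, a key longer than the hash block size is first
	 * replaced by its digest; shorter keys are used as-is and are
	 * zero-padded out to the block size as part of the HMAC
	 * construction.
	 */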
	if (keylen_in_bytes > sha_hmac_block_size) {
		uchar_t digested_key[SHA512_DIGEST_LENGTH];

		/*
		 * Hash the passed-in key to get a smaller key.
		 * The inner context is used since it hasn't been
		 * initialized yet.
		 */
		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
		    &sha2_hmac_ctx_tmpl->hc_icontext,
		    key->ck_data, keylen_in_bytes, digested_key);
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
		    sha_digest_len);
	} else {
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
		    keylen_in_bytes);
	}

	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
	*ctx_template_size = sizeof (sha2_hmac_ctx_t);

	return (CRYPTO_SUCCESS);
}

static int
sha2_free_context(crypto_ctx_t *ctx)
{
	uint_t ctx_len;

	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_SUCCESS);

	/*
	 * We have to free either SHA2 or SHA2-HMAC contexts, which
	 * have different lengths.
	 *
	 * Note: Below is dependent on the mechanism ordering.
	 */

	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
		ctx_len = sizeof (sha2_ctx_t);
	else
		ctx_len = sizeof (sha2_hmac_ctx_t);

	bzero(ctx->cc_provider_private, ctx_len);
	kmem_free(ctx->cc_provider_private, ctx_len);
	ctx->cc_provider_private = NULL;

	return (CRYPTO_SUCCESS);
}
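
/*
 * Illustrative sketch (not part of this module): kernel consumers of the
 * misc module can call the raw entry points directly, in the same way
 * PROV_SHA2_DIGEST_KEY() does above.  Assuming a caller-provided buffer
 * "buf" of "buflen" bytes, a one-shot SHA-256 digest looks roughly like:
 *
 *	SHA2_CTX ctx;
 *	uchar_t md[SHA256_DIGEST_LENGTH];
 *
 *	SHA2Init(SHA256, &ctx);
 *	SHA2Update(&ctx, buf, buflen);
 *	SHA2Final(md, &ctx);
 *
 * SHA256 here is the mechanism identifier expected by SHA2Init() (see
 * sys/sha2.h); SHA384 and SHA512 select the other digest sizes.
 */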