/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>

/*
 * Algorithm independent CBC functions.
 */
int
cbc_encrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*encrypt)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(uint8_t *, uint8_t *),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (length + ctx->cc_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->cc_remainder + ctx->cc_remainder_len,
		    length);
		ctx->cc_remainder_len += length;
		ctx->cc_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}

	lastp = (uint8_t *)ctx->cc_iv;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->cc_remainder_len > 0) {
			need = block_size - ctx->cc_remainder_len;

			if (need > remainder)
				return (CRYPTO_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->cc_remainder)
			    [ctx->cc_remainder_len], need);

			blockp = (uint8_t *)ctx->cc_remainder;
		} else {
			blockp = datap;
		}

		if (out == NULL) {
			/*
			 * XOR the previous cipher block or IV with the
			 * current clear block.
			 */
			xor_block(lastp, blockp);
			encrypt(ctx->cc_keysched, blockp, blockp);

			ctx->cc_lastp = blockp;
			lastp = blockp;

			if (ctx->cc_remainder_len > 0) {
				bcopy(blockp, ctx->cc_copy_to,
				    ctx->cc_remainder_len);
				bcopy(blockp + ctx->cc_remainder_len, datap,
				    need);
			}
		} else {
			/*
			 * XOR the previous cipher block or IV with the
			 * current clear block.
			 */
			xor_block(blockp, lastp);
			encrypt(ctx->cc_keysched, lastp, lastp);
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

			/* copy block to where it belongs */
			if (out_data_1_len == block_size) {
				copy_block(lastp, out_data_1);
			} else {
				bcopy(lastp, out_data_1, out_data_1_len);
				if (out_data_2 != NULL) {
					bcopy(lastp + out_data_1_len,
					    out_data_2,
					    block_size - out_data_1_len);
				}
			}
			/* update offset */
			out->cd_offset += block_size;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->cc_remainder_len != 0) {
			datap += need;
			ctx->cc_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->cc_remainder, remainder);
			ctx->cc_remainder_len = remainder;
			ctx->cc_copy_to = datap;
			goto out;
		}
		ctx->cc_copy_to = NULL;

	} while (remainder > 0);

out:
	/*
	 * Save the last encrypted block in the context.
	 */
	if (ctx->cc_lastp != NULL) {
		copy_block((uint8_t *)ctx->cc_lastp, (uint8_t *)ctx->cc_iv);
		ctx->cc_lastp = (uint8_t *)ctx->cc_iv;
	}

	return (CRYPTO_SUCCESS);
}

#define	OTHER(a, ctx) \
	(((a) == (ctx)->cc_lastblock) ? (ctx)->cc_iv : (ctx)->cc_lastblock)

/* ARGSUSED */
int
cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*decrypt)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(uint8_t *, uint8_t *),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (length + ctx->cc_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->cc_remainder + ctx->cc_remainder_len,
		    length);
		ctx->cc_remainder_len += length;
		ctx->cc_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}

	lastp = ctx->cc_lastp;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->cc_remainder_len > 0) {
			need = block_size - ctx->cc_remainder_len;

			if (need > remainder)
				return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->cc_remainder)
			    [ctx->cc_remainder_len], need);

			blockp = (uint8_t *)ctx->cc_remainder;
		} else {
			blockp = datap;
		}

		/* LINTED: pointer alignment */
		copy_block(blockp, (uint8_t *)OTHER((uint64_t *)lastp, ctx));

		if (out != NULL) {
			decrypt(ctx->cc_keysched, blockp,
			    (uint8_t *)ctx->cc_remainder);
			blockp = (uint8_t *)ctx->cc_remainder;
		} else {
			decrypt(ctx->cc_keysched, blockp, blockp);
		}

		/*
		 * XOR the previous cipher block or IV with the
		 * currently decrypted block.
		 */
		xor_block(lastp, blockp);

		/* LINTED: pointer alignment */
		lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);

		if (out != NULL) {
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

			bcopy(blockp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(blockp + out_data_1_len, out_data_2,
				    block_size - out_data_1_len);
			}

			/* update offset */
			out->cd_offset += block_size;

		} else if (ctx->cc_remainder_len > 0) {
			/* copy temporary block to where it belongs */
			bcopy(blockp, ctx->cc_copy_to, ctx->cc_remainder_len);
			bcopy(blockp + ctx->cc_remainder_len, datap, need);
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->cc_remainder_len != 0) {
			datap += need;
			ctx->cc_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->cc_remainder, remainder);
			ctx->cc_remainder_len = remainder;
			ctx->cc_lastp = lastp;
			ctx->cc_copy_to = datap;
			return (CRYPTO_SUCCESS);
		}
		ctx->cc_copy_to = NULL;

	} while (remainder > 0);

	ctx->cc_lastp = lastp;
	return (CRYPTO_SUCCESS);
}

int
cbc_init_ctx(cbc_ctx_t *cbc_ctx, char *param, size_t param_len,
    size_t block_size, void (*copy_block)(uint8_t *, uint64_t *))
{
	/*
	 * Copy IV into context.
	 *
	 * If cm_param == NULL then the IV comes from the
	 * cd_miscdata field in the crypto_data structure.
	 */
	if (param != NULL) {
#ifdef _KERNEL
		ASSERT(param_len == block_size);
#else
		assert(param_len == block_size);
#endif
		copy_block((uchar_t *)param, cbc_ctx->cc_iv);
	}

	cbc_ctx->cc_lastp = (uint8_t *)&cbc_ctx->cc_iv[0];
	cbc_ctx->cc_flags |= CBC_MODE;
	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
void *
cbc_alloc_ctx(int kmflag)
{
	cbc_ctx_t *cbc_ctx;

#ifdef _KERNEL
	if ((cbc_ctx = kmem_zalloc(sizeof (cbc_ctx_t), kmflag)) == NULL)
#else
	if ((cbc_ctx = calloc(1, sizeof (cbc_ctx_t))) == NULL)
#endif
		return (NULL);

	cbc_ctx->cc_flags = CBC_MODE;
	return (cbc_ctx);
}
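
/*
 * Illustrative sketch (not part of the original module): one way a caller
 * might drive the CBC entry points above for a hypothetical cipher with a
 * 16-byte block size.  The callbacks example_encrypt_block, example_copy_block,
 * example_copy_block64 and example_xor_block are assumed stand-ins for a real
 * cipher's block routines and are not defined here; only the cbc_*() functions
 * and the cbc_ctx_t/crypto_data_t types come from this file and its headers.
 * Kept under #if 0 so it is never compiled.
 */
#if 0
static int
example_cbc_encrypt(void *keysched, char *iv, char *plaintext, size_t len,
    crypto_data_t *out)
{
	cbc_ctx_t *ctx;
	int rv;

	/* Allocate a zeroed CBC context (kmflag is ignored in userland). */
	if ((ctx = cbc_alloc_ctx(KM_SLEEP)) == NULL)
		return (CRYPTO_HOST_MEMORY);

	ctx->cc_keysched = keysched;

	/* Load the 16-byte IV into the context and set CBC_MODE. */
	rv = cbc_init_ctx(ctx, iv, 16, 16, example_copy_block64);
	if (rv != CRYPTO_SUCCESS)
		return (rv);

	/*
	 * Encrypt the buffer into 'out'; a trailing partial block is
	 * buffered in ctx->cc_remainder until a later call completes it.
	 */
	return (cbc_encrypt_contiguous_blocks(ctx, plaintext, len, out, 16,
	    example_encrypt_block, example_copy_block, example_xor_block));
}
#endif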