/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 *
 * Copyright 2019 Joyent, Inc.
 */

#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/debug.h>
#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/byteorder.h>

/*
 * CTR (counter mode) is a stream cipher. That is, it generates a
 * pseudo-random keystream that is XORed with the input to encrypt
 * or decrypt. The keystream is generated by concatenating a nonce
 * (supplied during initialization) with a counter (initialized to
 * zero) to form an input block to the cipher mechanism. The
 * resulting cipher output is used as a chunk of the pseudo-random
 * keystream. Once all of the bytes of the keystream block have been
 * used, the counter is incremented and the process repeats.
 *
 * Since this is a stream cipher, we do not accumulate input ciphertext
 * the way we do for block modes. Instead we use ctr_ctx_t->ctr_offset
 * to track the number of bytes used in the current keystream block.
 */
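
/*
 * Schematically, the standard CTR construction described above can be
 * summarized as follows (a sketch for orientation only; E_K denotes the
 * underlying block cipher keyed with K, and 'count' is the counter width
 * supplied to ctr_init_ctx()):
 *
 *	keystream block i:	S_i = E_K(nonce | counter_i)
 *	output byte j:		out[j] = in[j] ^ S_i[j mod block_size]
 *	next counter:		counter_(i+1) = (counter_i + 1) mod 2^count
 */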

static void
ctr_new_keyblock(ctr_ctx_t *ctx,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
	uint64_t lower_counter, upper_counter;

	/* increment the counter */
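	/*
	 * ctr_cb[] holds the counter block in big-endian (on-the-wire)
	 * byte order, so the low-order counter bits live at the tail of
	 * ctr_cb[1]. The masks (byte-swapped by ctr_init_ctx() to match)
	 * select only the bits designated as the counter; any nonce bits
	 * outside the mask pass through unchanged. For example, with a
	 * 32-bit counter only the last four bytes of the block increment
	 * (modulo 2^32) while the leading 96 nonce bits stay fixed.
	 */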
	lower_counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_lower_mask);
	lower_counter = htonll(lower_counter + 1);
	lower_counter &= ctx->ctr_lower_mask;
	ctx->ctr_cb[1] = (ctx->ctr_cb[1] & ~(ctx->ctr_lower_mask)) |
	    lower_counter;

	/* wrap around */
	if (lower_counter == 0) {
		upper_counter = ntohll(ctx->ctr_cb[0] & ctx->ctr_upper_mask);
		upper_counter = htonll(upper_counter + 1);
		upper_counter &= ctx->ctr_upper_mask;
		ctx->ctr_cb[0] = (ctx->ctr_cb[0] & ~(ctx->ctr_upper_mask)) |
		    upper_counter;
	}

	/* generate the new keyblock */
	cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
	    (uint8_t *)ctx->ctr_keystream);
	ctx->ctr_offset = 0;
}

/*
 * XOR the input with the keystream and write the result to out.
 * This requires that the amount of data in 'in' is >= outlen
 * (ctr_mode_contiguous_blocks() guarantees this for us before we are
 * called). Since CTR mode is a stream cipher, we cannot use a cipher's
 * xxx_xor_block function (e.g. aes_xor_block()): we must handle
 * arbitrary lengths of input and should not buffer/accumulate partial
 * blocks between calls.
 */
static void
ctr_xor(ctr_ctx_t *ctx, const uint8_t *in, uint8_t *out, size_t outlen,
    size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
	const uint8_t *keyp;
	size_t keyamt;

	while (outlen > 0) {
		/*
		 * This occurs once we've consumed all the bytes in the
		 * current block of the keystream. ctr_init_ctx() creates
		 * the initial block of the keystream, so we always start
		 * with a full block of key data.
		 */
		if (ctx->ctr_offset == block_size) {
			ctr_new_keyblock(ctx, cipher);
		}

		keyp = (uint8_t *)ctx->ctr_keystream + ctx->ctr_offset;
		keyamt = block_size - ctx->ctr_offset;

		/*
		 * xor a byte at a time (while we have data and output
		 * space) and try to get in, out, and keyp 32-bit aligned.
		 * If in, out, and keyp all do become 32-bit aligned,
		 * we switch to xor-ing 32-bits at a time until we run out
		 * of 32-bit chunks, then switch back to xor-ing a byte at
		 * a time for any remainder.
		 */
		while (keyamt > 0 && outlen > 0 &&
		    !IS_P2ALIGNED(in, sizeof (uint32_t)) &&
		    !IS_P2ALIGNED(out, sizeof (uint32_t)) &&
		    !IS_P2ALIGNED(keyp, sizeof (uint32_t))) {
			*out++ = *in++ ^ *keyp++;
			keyamt--;
			outlen--;
		}

		if (keyamt > 3 && outlen > 3 &&
		    IS_P2ALIGNED(in, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(out, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(keyp, sizeof (uint32_t))) {
			const uint32_t *key32 = (const uint32_t *)keyp;
			const uint32_t *in32 = (const uint32_t *)in;
			uint32_t *out32 = (uint32_t *)out;

			do {
				*out32++ = *in32++ ^ *key32++;
				keyamt -= sizeof (uint32_t);
				outlen -= sizeof (uint32_t);
			} while (keyamt > 3 && outlen > 3);

			keyp = (const uint8_t *)key32;
			in = (const uint8_t *)in32;
			out = (uint8_t *)out32;
		}

		while (keyamt > 0 && outlen > 0) {
			*out++ = *in++ ^ *keyp++;
			keyamt--;
			outlen--;
		}

		ctx->ctr_offset = block_size - keyamt;
	}
}

/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 */
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *in, size_t in_length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
	size_t in_remainder = in_length;
	uint8_t *inp = (uint8_t *)in;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data;
	uint8_t *out_data_remainder;
	size_t out_data_len;

	if (block_size > sizeof (ctx->ctr_keystream))
		return (CRYPTO_ARGUMENTS_BAD);

	if (out == NULL)
		return (CRYPTO_ARGUMENTS_BAD);

	/* Make sure 'out->cd_offset + in_length' doesn't overflow. */
	if (out->cd_offset < 0)
		return (CRYPTO_DATA_LEN_RANGE);
	if (SIZE_MAX - in_length < (size_t)out->cd_offset)
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);

	/*
	 * This check guarantees 'out' contains sufficient space for
	 * the resulting output.
	 */
	if (out->cd_offset + in_length > out->cd_length)
		return (CRYPTO_BUFFER_TOO_SMALL);

	crypto_init_ptrs(out, &iov_or_mp, &offset);

	/* Now XOR the input with the keystream, writing the result to out */
	while (in_remainder > 0) {
		/*
		 * If out is a uio_t or an mblk_t, in_remainder might be
		 * larger than an individual iovec_t or mblk_t in out.
		 * crypto_get_ptrs uses the value of offset to set the
		 * value of out_data to the correct address for writing
		 * and sets out_data_len to reflect the largest amount of
		 * data (up to in_remainder) that can be written to
		 * out_data. It also increments offset by out_data_len.
		 * out_data_remainder is set to the start of the next
		 * segment for writing; however, it is not used here since
		 * the updated value of offset will be used in the next
		 * loop iteration to locate the next mblk_t/iovec_t. Since
		 * the sum of the sizes of all data buffers in 'out'
		 * (out->cd_length) was checked immediately prior to
		 * starting the loop, we should always terminate the loop.
		 */
		crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data,
		    &out_data_len, &out_data_remainder, in_remainder);

		/*
		 * crypto_get_ptrs() should guarantee these, but act as a
		 * safeguard in case the behavior ever changes.
		 */
		ASSERT3U(out_data_len, <=, in_remainder);
		ASSERT3U(out_data_len, >, 0);

		ctr_xor(ctx, inp, out_data, out_data_len, block_size, cipher);

		inp += out_data_len;
		in_remainder -= out_data_len;
	}

	out->cd_offset += in_length;

	return (CRYPTO_SUCCESS);
}

int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*copy_block)(uint8_t *, uint8_t *))
{
	uint64_t upper_mask = 0;
	uint64_t lower_mask = 0;

	if (count == 0 || count > 128) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}
	/* upper 64 bits of the mask */
	if (count >= 64) {
		count -= 64;
		upper_mask = (count == 64) ? UINT64_MAX : (1ULL << count) - 1;
		lower_mask = UINT64_MAX;
	} else {
		/* count < 64: mask only the low-order 'count' bits */
		lower_mask = (1ULL << count) - 1;
	}
	ctr_ctx->ctr_lower_mask = htonll(lower_mask);
	ctr_ctx->ctr_upper_mask = htonll(upper_mask);
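
	/*
	 * For example: count == 128 yields upper_mask == lower_mask ==
	 * UINT64_MAX (the entire block is the counter); count == 64 yields
	 * upper_mask == 0 and lower_mask == UINT64_MAX (only ctr_cb[1]
	 * increments); count == 32 yields a lower_mask covering just the
	 * low-order 32 bits. The htonll() conversions above store the
	 * masks in the same big-endian byte order as the counter block.
	 */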

	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];

	/* Generate the first block of the keystream */
	cipher(ctr_ctx->ctr_keysched, (uint8_t *)ctr_ctx->ctr_cb,
	    (uint8_t *)ctr_ctx->ctr_keystream);

	ctr_ctx->ctr_flags |= CTR_MODE;
	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
void *
ctr_alloc_ctx(int kmflag)
{
	ctr_ctx_t *ctr_ctx;

#ifdef _KERNEL
	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
#else
	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
#endif
		return (NULL);

	ctr_ctx->ctr_flags = CTR_MODE;
	return (ctr_ctx);
}
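
/*
 * A minimal usage sketch (illustrative only, not compiled): a consumer
 * such as an AES provider would typically allocate and initialize a
 * context, then feed it data. The names encrypt_block, copy_block_fn,
 * BLOCK_LEN, and keysched below are placeholders for whatever the
 * caller's cipher implementation provides; they are assumptions, not
 * part of this file.
 *
 *	ctr_ctx_t *ctx = ctr_alloc_ctx(KM_SLEEP);
 *	ctx->ctr_keysched = keysched;	// caller's expanded key schedule
 *
 *	// 'cb' is the initial counter block (nonce || counter) and
 *	// 'count' is the number of low-order counter bits (1-128).
 *	if (ctr_init_ctx(ctx, count, cb, encrypt_block, copy_block_fn) !=
 *	    CRYPTO_SUCCESS)
 *		goto fail;
 *
 *	// Encryption and decryption are the same operation in CTR mode.
 *	if (ctr_mode_contiguous_blocks(ctx, (char *)data, len, &out,
 *	    BLOCK_LEN, encrypt_block) != CRYPTO_SUCCESS)
 *		goto fail;
 */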