1*7188Smcpowers /*
2*7188Smcpowers  * CDDL HEADER START
3*7188Smcpowers  *
4*7188Smcpowers  * The contents of this file are subject to the terms of the
5*7188Smcpowers  * Common Development and Distribution License (the "License").
6*7188Smcpowers  * You may not use this file except in compliance with the License.
7*7188Smcpowers  *
8*7188Smcpowers  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9*7188Smcpowers  * or http://www.opensolaris.org/os/licensing.
10*7188Smcpowers  * See the License for the specific language governing permissions
11*7188Smcpowers  * and limitations under the License.
12*7188Smcpowers  *
13*7188Smcpowers  * When distributing Covered Code, include this CDDL HEADER in each
14*7188Smcpowers  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15*7188Smcpowers  * If applicable, add the following below this CDDL HEADER, with the
16*7188Smcpowers  * fields enclosed by brackets "[]" replaced with your own identifying
17*7188Smcpowers  * information: Portions Copyright [yyyy] [name of copyright owner]
18*7188Smcpowers  *
19*7188Smcpowers  * CDDL HEADER END
20*7188Smcpowers  */
21*7188Smcpowers /*
22*7188Smcpowers  * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
23*7188Smcpowers  * Use is subject to license terms.
24*7188Smcpowers  */
25*7188Smcpowers 
26*7188Smcpowers #pragma ident	"%Z%%M%	%I%	%E% SMI"
27*7188Smcpowers 
28*7188Smcpowers #ifndef _KERNEL
29*7188Smcpowers #include <strings.h>
30*7188Smcpowers #include <limits.h>
31*7188Smcpowers #include <assert.h>
32*7188Smcpowers #include <security/cryptoki.h>
33*7188Smcpowers #endif
34*7188Smcpowers 
35*7188Smcpowers #include <sys/types.h>
36*7188Smcpowers #include <modes/modes.h>
37*7188Smcpowers #include <sys/crypto/common.h>
38*7188Smcpowers #include <sys/crypto/impl.h>
39*7188Smcpowers 
/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 *
 * CTR mode is symmetric: both directions XOR the input with the
 * encryption of the current counter block, so one routine serves for
 * encrypt and decrypt.
 *
 * ctx		CTR context: counter block (ctr_cb), key schedule, and
 *		partial-block carryover state between calls.
 * data/length	input bytes to process this call.
 * out		output descriptor; if NULL the operation is in-place over
 *		the caller's input buffer(s).
 * block_size	cipher block size in bytes.
 * cipher	single-block cipher: cipher(ks, in, out).
 * xor_block	XORs its first argument into its second.
 *
 * Returns CRYPTO_SUCCESS, or CRYPTO_DATA_LEN_RANGE on an internal
 * length inconsistency (defensive; see below).
 */
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t counter;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif

	/*
	 * Not enough input for a full block yet: accumulate the bytes in
	 * ctx->ctr_remainder and remember where they came from so the
	 * in-place (out == NULL) path can write the result back later.
	 */
	if (length + ctx->ctr_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
		    length);
		ctx->ctr_remainder_len += length;
		ctx->ctr_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}

	/* dead store — lastp is always reassigned to ctr_tmp below */
	lastp = (uint8_t *)ctx->ctr_cb;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ctr_remainder_len > 0) {
			/* top the carried-over bytes up to a full block */
			need = block_size - ctx->ctr_remainder_len;

			/* defensive: cannot happen given the check above */
			if (need > remainder)
				return (CRYPTO_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
			    [ctx->ctr_remainder_len], need);

			blockp = (uint8_t *)ctx->ctr_remainder;
		} else {
			blockp = datap;
		}

		/* ctr_cb is the counter block; keystream goes to ctr_tmp */
		cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
		    (uint8_t *)ctx->ctr_tmp);

		lastp = (uint8_t *)ctx->ctr_tmp;

		/*
		 * Increment counter. Counter bits are confined
		 * to the bottom 64 bits of the counter block,
		 * selected by ctr_counter_mask.
		 */
		counter = ctx->ctr_cb[1] & ctx->ctr_counter_mask;
#ifdef _LITTLE_ENDIAN
		/*
		 * ctr_cb[] is kept in big-endian byte order; byte-swap
		 * the masked field into host order before incrementing.
		 */
		p = (uint8_t *)&counter;
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter++;
#ifdef _LITTLE_ENDIAN
		/* swap the incremented value back to big-endian */
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		/* discard any carry out of the counter field */
		counter &= ctx->ctr_counter_mask;
		ctx->ctr_cb[1] =
		    (ctx->ctr_cb[1] & ~(ctx->ctr_counter_mask)) | counter;

		/*
		 * XOR the encrypted counter block (keystream, in ctr_tmp)
		 * with the current clear block; lastp then holds the
		 * output block.
		 */
		xor_block(blockp, lastp);

		if (out == NULL) {
			if (ctx->ctr_remainder_len > 0) {
				/*
				 * In-place: the front of this block came from
				 * the previous call's buffer (ctr_copy_to),
				 * the rest from this call's input.
				 */
				bcopy(lastp, ctx->ctr_copy_to,
				    ctx->ctr_remainder_len);
				bcopy(lastp + ctx->ctr_remainder_len, datap,
				    need);
			}
			/*
			 * NOTE(review): when out == NULL and there was no
			 * carryover, the result is left in ctx->ctr_tmp and
			 * is not copied back over datap here — presumably
			 * callers of the in-place path consume it from the
			 * context; verify against callers.
			 */
		} else {
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				/* output block straddles two buffers */
				bcopy(lastp + out_data_1_len, out_data_2,
				    block_size - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += block_size;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ctr_remainder_len != 0) {
			datap += need;
			ctx->ctr_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block: buffer it for the next call. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->ctr_remainder, remainder);
			ctx->ctr_remainder_len = remainder;
			ctx->ctr_copy_to = datap;
			goto out;
		}
		ctx->ctr_copy_to = NULL;

	} while (remainder > 0);

out:
	return (CRYPTO_SUCCESS);
}
182*7188Smcpowers 
183*7188Smcpowers int
184*7188Smcpowers ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
185*7188Smcpowers     int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
186*7188Smcpowers {
187*7188Smcpowers 	uint8_t *lastp;
188*7188Smcpowers 	void *iov_or_mp;
189*7188Smcpowers 	offset_t offset;
190*7188Smcpowers 	uint8_t *out_data_1;
191*7188Smcpowers 	uint8_t *out_data_2;
192*7188Smcpowers 	size_t out_data_1_len;
193*7188Smcpowers 	uint8_t *p;
194*7188Smcpowers 	int i;
195*7188Smcpowers 
196*7188Smcpowers 	if (out->cd_length < ctx->ctr_remainder_len)
197*7188Smcpowers 		return (CRYPTO_DATA_LEN_RANGE);
198*7188Smcpowers 
199*7188Smcpowers 	encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
200*7188Smcpowers 	    (uint8_t *)ctx->ctr_tmp);
201*7188Smcpowers 
202*7188Smcpowers 	lastp = (uint8_t *)ctx->ctr_tmp;
203*7188Smcpowers 	p = (uint8_t *)ctx->ctr_remainder;
204*7188Smcpowers 	for (i = 0; i < ctx->ctr_remainder_len; i++) {
205*7188Smcpowers 		p[i] ^= lastp[i];
206*7188Smcpowers 	}
207*7188Smcpowers 
208*7188Smcpowers 	crypto_init_ptrs(out, &iov_or_mp, &offset);
209*7188Smcpowers 	crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
210*7188Smcpowers 	    &out_data_1_len, &out_data_2, ctx->ctr_remainder_len);
211*7188Smcpowers 
212*7188Smcpowers 	bcopy(p, out_data_1, out_data_1_len);
213*7188Smcpowers 	if (out_data_2 != NULL) {
214*7188Smcpowers 		bcopy((uint8_t *)p + out_data_1_len,
215*7188Smcpowers 		    out_data_2, ctx->ctr_remainder_len - out_data_1_len);
216*7188Smcpowers 	}
217*7188Smcpowers 	out->cd_offset += ctx->ctr_remainder_len;
218*7188Smcpowers 	ctx->ctr_remainder_len = 0;
219*7188Smcpowers 	return (CRYPTO_SUCCESS);
220*7188Smcpowers }
221*7188Smcpowers 
222*7188Smcpowers int
223*7188Smcpowers ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
224*7188Smcpowers void (*copy_block)(uint8_t *, uint8_t *))
225*7188Smcpowers {
226*7188Smcpowers 	uint64_t mask = 0;
227*7188Smcpowers #ifdef _LITTLE_ENDIAN
228*7188Smcpowers 	uint8_t *p8;
229*7188Smcpowers #endif
230*7188Smcpowers 
231*7188Smcpowers 	if (count == 0 || count > 64) {
232*7188Smcpowers 		return (CRYPTO_MECHANISM_PARAM_INVALID);
233*7188Smcpowers 	}
234*7188Smcpowers 	while (count-- > 0)
235*7188Smcpowers 		mask |= (1ULL << count);
236*7188Smcpowers #ifdef _LITTLE_ENDIAN
237*7188Smcpowers 	p8 = (uint8_t *)&mask;
238*7188Smcpowers 	mask = (((uint64_t)p8[0] << 56) |
239*7188Smcpowers 	    ((uint64_t)p8[1] << 48) |
240*7188Smcpowers 	    ((uint64_t)p8[2] << 40) |
241*7188Smcpowers 	    ((uint64_t)p8[3] << 32) |
242*7188Smcpowers 	    ((uint64_t)p8[4] << 24) |
243*7188Smcpowers 	    ((uint64_t)p8[5] << 16) |
244*7188Smcpowers 	    ((uint64_t)p8[6] << 8) |
245*7188Smcpowers 	    (uint64_t)p8[7]);
246*7188Smcpowers #endif
247*7188Smcpowers 	ctr_ctx->ctr_counter_mask = mask;
248*7188Smcpowers 	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
249*7188Smcpowers 	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
250*7188Smcpowers 	ctr_ctx->ctr_flags |= CTR_MODE;
251*7188Smcpowers 	return (CRYPTO_SUCCESS);
252*7188Smcpowers }
253*7188Smcpowers 
254*7188Smcpowers /* ARGSUSED */
255*7188Smcpowers void *
256*7188Smcpowers ctr_alloc_ctx(int kmflag)
257*7188Smcpowers {
258*7188Smcpowers 	ctr_ctx_t *ctr_ctx;
259*7188Smcpowers 
260*7188Smcpowers #ifdef _KERNEL
261*7188Smcpowers 	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
262*7188Smcpowers #else
263*7188Smcpowers 	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
264*7188Smcpowers #endif
265*7188Smcpowers 		return (NULL);
266*7188Smcpowers 
267*7188Smcpowers 	ctr_ctx->ctr_flags = CTR_MODE;
268*7188Smcpowers 	return (ctr_ctx);
269*7188Smcpowers }
270