xref: /titanic_41/usr/src/common/crypto/modes/ctr.c (revision 16239bc82c111618343e0a5b1a70e0fc702d00e0)
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/byteorder.h>

/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 */
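/*
 * Input that does not fill a whole block is buffered in ctx->ctr_remainder
 * and picked up again on the next call (or flushed by ctr_mode_final()).
 * For every full block, the current counter block (ctx->ctr_cb) is run
 * through the supplied `cipher' callback, the counter is incremented, and
 * the resulting keystream block is XORed with the input via `xor_block';
 * the result is then copied into `out' (when one is supplied).  Because
 * only the counter block is ever passed through the block cipher, the same
 * routine serves for both encryption and decryption.
 */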
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t lower_counter, upper_counter;

	if (length + ctx->ctr_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
		    length);
		ctx->ctr_remainder_len += length;
		ctx->ctr_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}

	lastp = (uint8_t *)ctx->ctr_cb;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ctr_remainder_len > 0) {
			need = block_size - ctx->ctr_remainder_len;

			if (need > remainder)
				return (CRYPTO_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
			    [ctx->ctr_remainder_len], need);

			blockp = (uint8_t *)ctx->ctr_remainder;
		} else {
			blockp = datap;
		}

		/* ctr_cb is the counter block */
		cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
		    (uint8_t *)ctx->ctr_tmp);

		lastp = (uint8_t *)ctx->ctr_tmp;

		/*
		 * Increment Counter.
		 */
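		/*
		 * The counter occupies the low-order bits of the 128-bit
		 * counter block: ctr_cb[0] holds the upper 64 bits and
		 * ctr_cb[1] the lower 64 bits, both in big-endian byte
		 * order.  The masks set up by ctr_init_ctx() select only
		 * the counter bits, so the nonce portion of the block is
		 * never disturbed: the masked bits are byte-swapped to
		 * host order, incremented, swapped back, and merged with
		 * the preserved bits.  A wrap of the lower word carries
		 * into the upper word below.
		 */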
		lower_counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_lower_mask);
		lower_counter = htonll(lower_counter + 1);
		lower_counter &= ctx->ctr_lower_mask;
		ctx->ctr_cb[1] = (ctx->ctr_cb[1] & ~(ctx->ctr_lower_mask)) |
		    lower_counter;

		/* wrap around */
		if (lower_counter == 0) {
			upper_counter =
			    ntohll(ctx->ctr_cb[0] & ctx->ctr_upper_mask);
			upper_counter = htonll(upper_counter + 1);
			upper_counter &= ctx->ctr_upper_mask;
			ctx->ctr_cb[0] =
			    (ctx->ctr_cb[0] & ~(ctx->ctr_upper_mask)) |
			    upper_counter;
		}

		/*
		 * XOR encrypted counter block with the current clear block.
		 */
		xor_block(blockp, lastp);

		if (out == NULL) {
			if (ctx->ctr_remainder_len > 0) {
				bcopy(lastp, ctx->ctr_copy_to,
				    ctx->ctr_remainder_len);
				bcopy(lastp + ctx->ctr_remainder_len, datap,
				    need);
			}
		} else {
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    block_size - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += block_size;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ctr_remainder_len != 0) {
			datap += need;
			ctx->ctr_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->ctr_remainder, remainder);
			ctx->ctr_remainder_len = remainder;
			ctx->ctr_copy_to = datap;
			goto out;
		}
		ctx->ctr_copy_to = NULL;

	} while (remainder > 0);

out:
	return (CRYPTO_SUCCESS);
}

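/*
 * Flush the partial block, if any, that is left in the context at the end
 * of a CTR operation.  The counter block is encrypted one more time and the
 * remaining ctr_remainder_len input bytes are XORed with the head of that
 * keystream before being copied into `out'.
 */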
int
ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
{
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint8_t *p;
	int i;

	if (out->cd_length < ctx->ctr_remainder_len)
		return (CRYPTO_DATA_LEN_RANGE);

	encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
	    (uint8_t *)ctx->ctr_tmp);

	lastp = (uint8_t *)ctx->ctr_tmp;
	p = (uint8_t *)ctx->ctr_remainder;
	for (i = 0; i < ctx->ctr_remainder_len; i++) {
		p[i] ^= lastp[i];
	}

	crypto_init_ptrs(out, &iov_or_mp, &offset);
	crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2, ctx->ctr_remainder_len);

	bcopy(p, out_data_1, out_data_1_len);
	if (out_data_2 != NULL) {
		bcopy((uint8_t *)p + out_data_1_len,
		    out_data_2, ctx->ctr_remainder_len - out_data_1_len);
	}
	out->cd_offset += ctx->ctr_remainder_len;
	ctx->ctr_remainder_len = 0;
	return (CRYPTO_SUCCESS);
}

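/*
 * Initialize a CTR context from the mechanism parameter: `count' is the
 * width of the counter field in bits (1 to 128, counted from the least
 * significant end of the counter block) and `cb' is the initial 128-bit
 * counter block.  The derived masks confine the increment performed in
 * ctr_mode_contiguous_blocks() to those low-order bits.  For example,
 * count = 32 yields lower_mask = 0x00000000ffffffff and upper_mask = 0,
 * both stored in big-endian byte order.
 */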
int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
void (*copy_block)(uint8_t *, uint8_t *))
{
	uint64_t upper_mask = 0;
	uint64_t lower_mask = 0;

	if (count == 0 || count > 128) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}
	/* upper 64 bits of the mask */
	if (count >= 64) {
		count -= 64;
		upper_mask = (count == 64) ? UINT64_MAX : (1ULL << count) - 1;
		lower_mask = UINT64_MAX;
	} else {
		/* counter is narrower than 64 bits; mask only the low bits */
		lower_mask = (1ULL << count) - 1;
	}
	ctr_ctx->ctr_lower_mask = htonll(lower_mask);
	ctr_ctx->ctr_upper_mask = htonll(upper_mask);

	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
	ctr_ctx->ctr_flags |= CTR_MODE;
	return (CRYPTO_SUCCESS);
}

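/*
 * Allocate and zero a CTR context.  In the kernel the allocation honors the
 * caller's kmem flag; in userland the flag is unused (hence ARGSUSED) and
 * calloc() is used instead.
 */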
/* ARGSUSED */
void *
ctr_alloc_ctx(int kmflag)
{
	ctr_ctx_t *ctr_ctx;

#ifdef _KERNEL
	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
#else
	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
#endif
		return (NULL);

	ctr_ctx->ctr_flags = CTR_MODE;
	return (ctr_ctx);
}
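
/*
 * Illustrative sketch only: roughly how a cipher module might drive the
 * routines above for a single kernel-side CTR operation.  The my_*
 * callbacks, the key schedule, the initial counter block, the block size
 * of 16 and the crypto_data_t `out' are placeholders for whatever the
 * calling mechanism actually supplies; none of them are defined here, and
 * error checks are omitted.
 *
 *	ctr_ctx_t *ctx = ctr_alloc_ctx(KM_SLEEP);
 *
 *	(void) ctr_init_ctx(ctx, 32, my_initial_counter_block,
 *	    my_copy_block);
 *	ctx->ctr_keysched = my_key_schedule;
 *
 *	(void) ctr_mode_contiguous_blocks(ctx, (char *)plaintext, pt_len,
 *	    &out, 16, my_encrypt_block, my_xor_block);
 *	(void) ctr_mode_final(ctx, &out, my_encrypt_block);
 */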
250