xref: /illumos-gate/usr/src/common/crypto/modes/ctr.c (revision 7c80a9608efb5c2bb78fb923e352a01088239788)
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/byteorder.h>

/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 */
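/*
 * In outline: each iteration encrypts the current counter block (ctr_cb)
 * with the cipher's forward direction only and XORs the resulting
 * keystream block into the caller's data; since XOR is its own inverse,
 * the same routine serves for both encryption and decryption.  After
 * every block the counter field of ctr_cb is incremented; which bits of
 * the 128-bit counter block form that field is determined by the masks
 * set up in ctr_init_ctx().
 */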
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t lower_counter, upper_counter;

	if (length + ctx->ctr_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
		    length);
		ctx->ctr_remainder_len += length;
		ctx->ctr_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}
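	/*
	 * At least one full block can now be produced.  Note that
	 * ctr_copy_to (set above, and again below when a trailing partial
	 * block is buffered) records where the buffered bytes came from in
	 * the caller's buffer, so the in-place path (out == NULL) can copy
	 * the corresponding output back there on a later call.
	 */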

	lastp = (uint8_t *)ctx->ctr_cb;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ctr_remainder_len > 0) {
			need = block_size - ctx->ctr_remainder_len;

			if (need > remainder)
				return (CRYPTO_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
			    [ctx->ctr_remainder_len], need);

			blockp = (uint8_t *)ctx->ctr_remainder;
		} else {
			blockp = datap;
		}

		/* ctr_cb is the counter block */
		cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
		    (uint8_t *)ctx->ctr_tmp);

		lastp = (uint8_t *)ctx->ctr_tmp;

		/*
		 * Increment Counter.
		 */
		lower_counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_lower_mask);
		lower_counter = htonll(lower_counter + 1);
		lower_counter &= ctx->ctr_lower_mask;
		ctx->ctr_cb[1] = (ctx->ctr_cb[1] & ~(ctx->ctr_lower_mask)) |
		    lower_counter;

		/* wrap around */
		if (lower_counter == 0) {
			upper_counter =
			    ntohll(ctx->ctr_cb[0] & ctx->ctr_upper_mask);
			upper_counter = htonll(upper_counter + 1);
			upper_counter &= ctx->ctr_upper_mask;
			ctx->ctr_cb[0] =
			    (ctx->ctr_cb[0] & ~(ctx->ctr_upper_mask)) |
			    upper_counter;
		}
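		/*
		 * ctr_cb[] and both masks are kept in big-endian (network)
		 * byte order, hence the ntohll()/htonll() round trip around
		 * the addition.  Only the bits selected by ctr_lower_mask and
		 * ctr_upper_mask are incremented; any nonce/IV bits outside
		 * the masks are preserved, and a carry reaches the upper word
		 * only when the masked lower word wraps to zero.
		 */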

		/*
		 * XOR encrypted counter block with the current clear block.
		 */
		xor_block(blockp, lastp);

		if (out == NULL) {
			if (ctx->ctr_remainder_len > 0) {
				bcopy(lastp, ctx->ctr_copy_to,
				    ctx->ctr_remainder_len);
				bcopy(lastp + ctx->ctr_remainder_len, datap,
				    need);
			}
		} else {
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

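			/*
			 * The output may be split across non-contiguous
			 * buffers (e.g. uio or mblk chains), so
			 * crypto_get_ptrs() can return the block as two
			 * segments: out_data_1 of out_data_1_len bytes and,
			 * if needed, out_data_2 for the rest of the block.
			 */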
			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    block_size - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += block_size;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ctr_remainder_len != 0) {
			datap += need;
			ctx->ctr_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->ctr_remainder, remainder);
			ctx->ctr_remainder_len = remainder;
			ctx->ctr_copy_to = datap;
			goto out;
		}
		ctx->ctr_copy_to = NULL;

	} while (remainder > 0);

out:
	return (CRYPTO_SUCCESS);
}

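/*
 * Flush the final, partial block.  The buffered remainder (always shorter
 * than one block) is XORed with the encrypted counter block and written to
 * 'out'; the counter block itself is not incremented here.
 */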
int
ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
{
	uint8_t *lastp;
	uint8_t *p;
	int i;
	int rv;

	if (out->cd_length < ctx->ctr_remainder_len)
		return (CRYPTO_DATA_LEN_RANGE);

	encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
	    (uint8_t *)ctx->ctr_tmp);

	lastp = (uint8_t *)ctx->ctr_tmp;
	p = (uint8_t *)ctx->ctr_remainder;
	for (i = 0; i < ctx->ctr_remainder_len; i++) {
		p[i] ^= lastp[i];
	}

	rv = crypto_put_output_data(p, out, ctx->ctr_remainder_len);
	if (rv == CRYPTO_SUCCESS) {
		out->cd_offset += ctx->ctr_remainder_len;
		ctx->ctr_remainder_len = 0;
	}
	return (rv);
}

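/*
 * Initialize the counter mode context.  'count' is the number of low-order
 * bits of the counter block that make up the counter field (1 to 128 bits,
 * typically the ulCounterBits value from the CTR mechanism parameter), and
 * 'cb' is the initial counter block (nonce/IV plus starting counter value).
 * The masks derived here confine the increment done in
 * ctr_mode_contiguous_blocks() to that counter field.
 */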
int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
    void (*copy_block)(uint8_t *, uint8_t *))
{
	uint64_t upper_mask = 0;
	uint64_t lower_mask = 0;

	if (count == 0 || count > 128) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}
	/* upper 64 bits of the mask */
	if (count >= 64) {
		count -= 64;
		upper_mask = (count == 64) ? UINT64_MAX : (1ULL << count) - 1;
		lower_mask = UINT64_MAX;
	} else {
		/* now the lower 63 bits */
		lower_mask = (1ULL << count) - 1;
	}
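	/*
	 * For example, count = 32 gives lower_mask = 0x00000000ffffffff and
	 * upper_mask = 0, so only the last four bytes of the counter block
	 * ever change, while count = 128 gives all-ones masks and the whole
	 * block behaves as one big-endian counter.
	 */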
	ctr_ctx->ctr_lower_mask = htonll(lower_mask);
	ctr_ctx->ctr_upper_mask = htonll(upper_mask);

	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
	ctr_ctx->ctr_flags |= CTR_MODE;
	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
void *
ctr_alloc_ctx(int kmflag)
{
	ctr_ctx_t *ctr_ctx;

#ifdef _KERNEL
	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
#else
	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
#endif
		return (NULL);

	ctr_ctx->ctr_flags = CTR_MODE;
	return (ctr_ctx);
}
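
/*
 * Illustrative, standalone sketch (not part of this file): the same masked,
 * big-endian counter increment performed above, written against a plain
 * 16-byte counter block with portable byte loads/stores instead of the
 * htonll()/ntohll() word view.  The names be64_get(), be64_put() and
 * ctr_increment() are hypothetical and exist only for this example; 'count'
 * is assumed to be in the range 1..128, as enforced by ctr_init_ctx().
 *
 *	#include <stdint.h>
 *
 *	static uint64_t
 *	be64_get(const uint8_t *p)
 *	{
 *		uint64_t v = 0;
 *		int i;
 *
 *		for (i = 0; i < 8; i++)
 *			v = (v << 8) | p[i];
 *		return (v);
 *	}
 *
 *	static void
 *	be64_put(uint8_t *p, uint64_t v)
 *	{
 *		int i;
 *
 *		for (i = 7; i >= 0; i--) {
 *			p[i] = (uint8_t)v;
 *			v >>= 8;
 *		}
 *	}
 *
 *	static void
 *	ctr_increment(uint8_t cb[16], unsigned int count)
 *	{
 *		uint64_t lower_mask, upper_mask, lo, hi;
 *
 *		if (count >= 64) {
 *			upper_mask = (count == 128) ?
 *			    UINT64_MAX : (1ULL << (count - 64)) - 1;
 *			lower_mask = UINT64_MAX;
 *		} else {
 *			upper_mask = 0;
 *			lower_mask = (1ULL << count) - 1;
 *		}
 *
 *		lo = (be64_get(cb + 8) + 1) & lower_mask;
 *		be64_put(cb + 8, (be64_get(cb + 8) & ~lower_mask) | lo);
 *		if (lo == 0 && upper_mask != 0) {
 *			hi = (be64_get(cb) + 1) & upper_mask;
 *			be64_put(cb, (be64_get(cb) & ~upper_mask) | hi);
 *		}
 *	}
 *
 * With count = 32 and a counter block ending in 0xff 0xff 0xff 0xff, a call
 * to ctr_increment(cb, 32) wraps those four bytes to zero and leaves the
 * first twelve bytes (the nonce/IV) untouched, matching the behaviour of the
 * increment in ctr_mode_contiguous_blocks().
 */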