/* xref: /freebsd/sys/crypto/openssl/ossl_chacha20.c (revision dce5f3abed7181cc533ca5ed3de44517775e78dd) */
1 /*-
2  * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
3  *
4  * Copyright (c) 2020 Netflix, Inc
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  * 1. Redistributions of source code must retain the above copyright
10  *    notice, this list of conditions and the following disclaimer,
11  *    without modification.
12  * 2. Redistributions in binary form must reproduce at minimum a disclaimer
13  *    similar to the "NO WARRANTY" disclaimer below ("Disclaimer") and any
14  *    redistribution must be conditioned upon including a substantially
15  *    similar Disclaimer requirement for further binary redistribution.
16  *
17  * NO WARRANTY
18  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20  * LIMITED TO, THE IMPLIED WARRANTIES OF NONINFRINGEMENT, MERCHANTIBILITY
21  * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
22  * THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY,
23  * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
26  * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
28  * THE POSSIBILITY OF SUCH DAMAGES.
29  */
30 
31 #include <sys/types.h>
32 #include <sys/endian.h>
33 #include <sys/malloc.h>
34 #include <sys/time.h>
35 
36 #include <opencrypto/cryptodev.h>
37 
38 #include <crypto/openssl/ossl.h>
39 #include <crypto/openssl/ossl_chacha.h>
40 #include <crypto/openssl/ossl_poly1305.h>
41 
/*
 * Process one crypto(9) request with the plain ChaCha20 stream cipher
 * (no authentication).  The payload is transformed in place unless the
 * request carries a separate output buffer.  Always returns 0.
 */
int
ossl_chacha20(struct cryptop *crp, const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	/* Bounce buffer used when an input or output segment is split. */
	unsigned char block[CHACHA_BLK_SIZE];
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	/* A per-request key overrides the session key. */
	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	/* Load the key as little-endian 32-bit words. */
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
	/*
	 * The request IV supplies the initial counter/nonce words;
	 * convert from little-endian wire format to host order.
	 * NOTE(review): this fills the whole counter[] from the IV, so it
	 * assumes csp_ivlen == CHACHA_CTR_SIZE — confirm session validation.
	 */
	crypto_read_iv(crp, counter);
	for (i = 0; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segment(&cc_in, &inlen);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;		/* in-place operation */
	outseg = crypto_cursor_segment(&cc_out, &outlen);
	while (resid >= CHACHA_BLK_SIZE) {
		/*
		 * If the current input segment holds less than one cipher
		 * block, gather a full block into the bounce buffer (the
		 * copy also advances cc_in past those bytes).
		 */
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		/* Likewise, stage output in the bounce buffer if needed. */
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			/*
			 * Wrapped: next_counter holds the number of excess
			 * blocks.  Drop them from this pass; the carry into
			 * counter[1] happens below when counter[0] hits 0.
			 */
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);

		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;	/* carry into the next counter word */

		/*
		 * Bounce-buffer output must be copied back through the
		 * cursor (which also advances it); direct segment output
		 * just advances the cursor and local segment view.
		 */
		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segment(&cc_out, &outlen);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			/* copydata above already advanced cc_in. */
			inseg = crypto_cursor_segment(&cc_in, &inlen);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	if (resid > 0) {
		/*
		 * Partial final block: zero-pad the input in the bounce
		 * buffer, run a full cipher block, and copy back only the
		 * resid bytes that belong to the payload.
		 */
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);
	}

	/* Scrub key material and data remnants from the stack. */
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}
140 
/*
 * Process one crypto(9) request with ChaCha20-Poly1305 AEAD encryption:
 * derive the Poly1305 key from cipher block 0, MAC the AAD, encrypt the
 * payload starting at block counter 1 while MACing the ciphertext, then
 * MAC the padded length block and write the tag to crp_digest_start.
 * Always returns 0.
 */
int
ossl_chacha20_poly1305_encrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	/* Bounce buffer; also reused for padding and the length block. */
	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
	unsigned char tag[POLY1305_HASH_LEN];
	POLY1305 auth_ctx;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	/* A per-request key overrides the session key. */
	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	/* Load the key as little-endian 32-bit words. */
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

	/*
	 * Place the nonce in the trailing counter words: an 8-byte nonce
	 * fills counter[2..3] (leaving counter[1] as a counter extension),
	 * a 12-byte nonce fills counter[1..3].
	 */
	memset(counter, 0, sizeof(counter));
	crypto_read_iv(crp, counter + (CHACHA_CTR_SIZE - csp->csp_ivlen) / 4);
	for (i = 1; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	/* Block 0 is used to generate the poly1305 key. */
	counter[0] = 0;

	memset(block, 0, sizeof(block));
	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
	Poly1305_Init(&auth_ctx, block);

	/* MAC the AAD. */
	if (crp->crp_aad != NULL)
		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
	else
		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_aad_length % 16 != 0) {
		/* padding1 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_aad_length % 16);
	}

	/* Encryption starts with block 1. */
	counter[0] = 1;

	/* Do encryption with MAC */
	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segment(&cc_in, &inlen);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;		/* in-place operation */
	outseg = crypto_cursor_segment(&cc_out, &outlen);
	while (resid >= CHACHA_BLK_SIZE) {
		/* Gather a full block into the bounce buffer if split. */
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/*
		 * Truncate if the 32-bit counter would roll over.  Only
		 * 8-byte nonces have a 64-bit counter (counter[1] is part
		 * of the counter rather than the nonce), so only then may
		 * the carry proceed.
		 */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (csp->csp_ivlen == 8 && next_counter < counter[0]) {
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		/* Encrypt, then MAC the ciphertext just produced. */
		ChaCha20_ctr32(out, in, todo, key, counter);
		Poly1305_Update(&auth_ctx, out, todo);

		counter[0] = next_counter;
		if (csp->csp_ivlen == 8 && counter[0] == 0)
			counter[1]++;	/* carry into the high counter word */

		/* Copy bounce-buffer output back; else advance segments. */
		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segment(&cc_out, &outlen);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			/* copydata above already advanced cc_in. */
			inseg = crypto_cursor_segment(&cc_in, &inlen);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	if (resid > 0) {
		/* Partial final block via the zero-padded bounce buffer. */
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);

		/* padding2 */
		todo = roundup2(resid, 16);
		memset(block + resid, 0, todo - resid);
		Poly1305_Update(&auth_ctx, block, todo);
	}

	/* lengths */
	le64enc(block, crp->crp_aad_length);
	le64enc(block + 8, crp->crp_payload_length);
	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

	/* Emit the tag, possibly truncated to the session's MAC length. */
	Poly1305_Final(&auth_ctx, tag);
	crypto_copyback(crp, crp->crp_digest_start, csp->csp_auth_mlen == 0 ?
	    POLY1305_HASH_LEN : csp->csp_auth_mlen, tag);

	/* Scrub key material and intermediate state from the stack. */
	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}
285 
286 
/*
 * Process one crypto(9) request with ChaCha20-Poly1305 AEAD decryption.
 * The MAC is computed over the AAD and ciphertext and verified with a
 * timing-safe compare BEFORE any decryption occurs; returns EBADMSG on
 * tag mismatch (without touching the output buffer), 0 on success.
 */
int
ossl_chacha20_poly1305_decrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	/* Bounce buffer; also reused for padding and the length block. */
	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
	/* tag: computed MAC; tag2: MAC received with the request. */
	unsigned char tag[POLY1305_HASH_LEN], tag2[POLY1305_HASH_LEN];
	/* NOTE(review): encrypt path spells this type POLY1305 — same thing? */
	struct poly1305_context auth_ctx;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	int error;
	u_int i, mlen;

	/* A per-request key overrides the session key. */
	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	/* Load the key as little-endian 32-bit words. */
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

	/*
	 * Place the nonce in the trailing counter words: an 8-byte nonce
	 * fills counter[2..3] (leaving counter[1] as a counter extension),
	 * a 12-byte nonce fills counter[1..3].
	 */
	memset(counter, 0, sizeof(counter));
	crypto_read_iv(crp, counter + (CHACHA_CTR_SIZE - csp->csp_ivlen) / 4);
	for (i = 1; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	/* Block 0 is used to generate the poly1305 key. */
	counter[0] = 0;

	memset(block, 0, sizeof(block));
	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
	Poly1305_Init(&auth_ctx, block);

	/* MAC the AAD. */
	if (crp->crp_aad != NULL)
		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
	else
		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_aad_length % 16 != 0) {
		/* padding1 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_aad_length % 16);
	}

	/* Mac the ciphertext. */
	crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
	    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_payload_length % 16 != 0) {
		/* padding2 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_payload_length % 16);
	}

	/* lengths */
	le64enc(block, crp->crp_aad_length);
	le64enc(block + 8, crp->crp_payload_length);
	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

	/* Verify the tag (timing-safe) before decrypting anything. */
	Poly1305_Final(&auth_ctx, tag);
	mlen = csp->csp_auth_mlen == 0 ? POLY1305_HASH_LEN : csp->csp_auth_mlen;
	crypto_copydata(crp, crp->crp_digest_start, mlen, tag2);
	if (timingsafe_bcmp(tag, tag2, mlen) != 0) {
		error = EBADMSG;
		goto out;
	}

	/* Decryption starts with block 1. */
	counter[0] = 1;

	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segment(&cc_in, &inlen);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;		/* in-place operation */
	outseg = crypto_cursor_segment(&cc_out, &outlen);
	while (resid >= CHACHA_BLK_SIZE) {
		/* Gather a full block into the bounce buffer if split. */
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/*
		 * Truncate if the 32-bit counter would roll over.  Only
		 * 8-byte nonces have a 64-bit counter (counter[1] is part
		 * of the counter rather than the nonce), so only then may
		 * the carry proceed.
		 */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (csp->csp_ivlen == 8 && next_counter < counter[0]) {
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);

		counter[0] = next_counter;
		if (csp->csp_ivlen == 8 && counter[0] == 0)
			counter[1]++;	/* carry into the high counter word */

		/* Copy bounce-buffer output back; else advance segments. */
		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segment(&cc_out, &outlen);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			/* copydata above already advanced cc_in. */
			inseg = crypto_cursor_segment(&cc_in, &inlen);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	if (resid > 0) {
		/* Partial final block via the zero-padded bounce buffer. */
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);
	}

	error = 0;
out:
	/*
	 * Scrub key material and intermediate state from the stack.
	 * NOTE(review): tag2 (the received digest copy) is not zeroized;
	 * it is not key-derived, but consider clearing it for symmetry.
	 */
	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (error);
}
441