xref: /freebsd/sys/crypto/openssl/ossl_chacha20.c (revision ac099daf6742ead81ea7ea86351a8ef4e783041b)
/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2020 Netflix, Inc
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer,
 *    without modification.
 * 2. Redistributions in binary form must reproduce at minimum a disclaimer
 *    similar to the "NO WARRANTY" disclaimer below ("Disclaimer") and any
 *    redistribution must be conditioned upon including a substantially
 *    similar Disclaimer requirement for further binary redistribution.
 *
 * NO WARRANTY
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF NONINFRINGEMENT, MERCHANTIBILITY
 * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY,
 * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
 * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGES.
 */

#include <sys/types.h>
#include <sys/endian.h>
#include <sys/malloc.h>
#include <sys/time.h>

#include <opencrypto/cryptodev.h>

#include <crypto/openssl/ossl.h>
#include <crypto/openssl/ossl_chacha.h>
#include <crypto/openssl/ossl_poly1305.h>

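/*
 * Run the raw ChaCha20 stream cipher over a single opencrypto request
 * using OpenSSL's ChaCha20_ctr32() core.  The payload may be scattered
 * across several buffer segments, so input and output are walked with
 * crypto buffer cursors, and the on-stack 'block' serves as a bounce
 * buffer whenever a segment is shorter than one ChaCha block.
 */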
int
ossl_chacha20(struct cryptop *crp, const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	unsigned char block[CHACHA_BLK_SIZE];
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
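	/*
	 * The request IV supplies the initial counter/nonce words as
	 * little-endian 32-bit values; counter[0] serves as the block
	 * counter below.
	 */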
	crypto_read_iv(crp, counter);
	for (i = 0; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segbase(&cc_in);
	inlen = crypto_cursor_seglen(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outseg = crypto_cursor_segbase(&cc_out);
	outlen = crypto_cursor_seglen(&cc_out);
	while (resid >= CHACHA_BLK_SIZE) {
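		/*
		 * If the current input or output segment is shorter than a
		 * full ChaCha block, bounce it through 'block' so that
		 * ChaCha20_ctr32() always operates on whole, contiguous
		 * blocks.
		 */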
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);

		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;

		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segbase(&cc_out);
			outlen = crypto_cursor_seglen(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			inseg = crypto_cursor_segbase(&cc_in);
			inlen = crypto_cursor_seglen(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

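	/*
	 * Handle a final partial block: copy the remaining bytes into a
	 * zeroed 'block', run ChaCha20 over the full block in place, and
	 * copy back only the 'resid' payload bytes.
	 */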
	if (resid > 0) {
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);
	}

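	/* Scrub key material and scratch state from the stack. */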
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}

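/*
 * ChaCha20-Poly1305 AEAD encryption (RFC 8439): keystream block 0
 * provides the one-time Poly1305 key, the AAD and then the generated
 * ciphertext are MACed, each zero-padded to a 16-byte boundary, the
 * payload is encrypted starting at block 1, and the resulting tag is
 * written to the request's digest region.
 */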
int
ossl_chacha20_poly1305_encrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
	unsigned char tag[POLY1305_HASH_LEN];
	POLY1305 auth_ctx;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

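	/*
	 * counter[0] is the 32-bit block counter; the request IV provides
	 * the nonce in the remaining words.
	 */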
	crypto_read_iv(crp, counter + 1);
	for (i = 1; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	/* Block 0 is used to generate the poly1305 key. */
	counter[0] = 0;

	memset(block, 0, sizeof(block));
	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
	Poly1305_Init(&auth_ctx, block);

	/* MAC the AAD. */
	if (crp->crp_aad != NULL)
		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
	else
		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_aad_length % 16 != 0) {
		/* padding1 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_aad_length % 16);
	}

	/* Encryption starts with block 1. */
	counter[0] = 1;

	/* Do encryption with MAC */
	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segbase(&cc_in);
	inlen = crypto_cursor_seglen(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outseg = crypto_cursor_segbase(&cc_out);
	outlen = crypto_cursor_seglen(&cc_out);
	while (resid >= CHACHA_BLK_SIZE) {
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);
		Poly1305_Update(&auth_ctx, out, todo);

		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;

		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segbase(&cc_out);
			outlen = crypto_cursor_seglen(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			inseg = crypto_cursor_segbase(&cc_in);
			inlen = crypto_cursor_seglen(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	if (resid > 0) {
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);

		/* padding2 */
		todo = roundup2(resid, 16);
		memset(block + resid, 0, todo - resid);
		Poly1305_Update(&auth_ctx, block, todo);
	}

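	/*
	 * Finish the Poly1305 input.  Per the RFC 8439 construction the
	 * complete MAC input is AAD || pad16(AAD) || ciphertext ||
	 * pad16(ciphertext) || le64(AAD length) || le64(ciphertext length).
	 */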
	/* lengths */
	le64enc(block, crp->crp_aad_length);
	le64enc(block + 8, crp->crp_payload_length);
	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

	Poly1305_Final(&auth_ctx, tag);
	crypto_copyback(crp, crp->crp_digest_start, csp->csp_auth_mlen == 0 ?
	    POLY1305_HASH_LEN : csp->csp_auth_mlen, tag);

	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}
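/*
 * ChaCha20-Poly1305 AEAD decryption (RFC 8439).  The Poly1305 key is
 * derived from keystream block 0, and the tag is recomputed over the
 * AAD and ciphertext and checked with a constant-time comparison
 * before any plaintext is written back; only then is the payload
 * decrypted starting at block 1.
 */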
int
ossl_chacha20_poly1305_decrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
	unsigned char tag[POLY1305_HASH_LEN], tag2[POLY1305_HASH_LEN];
	struct poly1305_context auth_ctx;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	int error;
	u_int i, mlen;

	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

	crypto_read_iv(crp, counter + 1);
	for (i = 1; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	/* Block 0 is used to generate the poly1305 key. */
	counter[0] = 0;

	memset(block, 0, sizeof(block));
	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
	Poly1305_Init(&auth_ctx, block);

	/* MAC the AAD. */
	if (crp->crp_aad != NULL)
		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
	else
		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_aad_length % 16 != 0) {
		/* padding1 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_aad_length % 16);
	}

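	/*
	 * Unlike encryption, the tag is computed over the existing
	 * ciphertext and verified before any plaintext is written out.
	 */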
	/* MAC the ciphertext. */
	crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
	    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_payload_length % 16 != 0) {
		/* padding2 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_payload_length % 16);
	}

	/* lengths */
	le64enc(block, crp->crp_aad_length);
	le64enc(block + 8, crp->crp_payload_length);
	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

	Poly1305_Final(&auth_ctx, tag);
	mlen = csp->csp_auth_mlen == 0 ? POLY1305_HASH_LEN : csp->csp_auth_mlen;
	crypto_copydata(crp, crp->crp_digest_start, mlen, tag2);
	if (timingsafe_bcmp(tag, tag2, mlen) != 0) {
		error = EBADMSG;
		goto out;
	}

	/* Decryption starts with block 1. */
	counter[0] = 1;

	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segbase(&cc_in);
	inlen = crypto_cursor_seglen(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outseg = crypto_cursor_segbase(&cc_out);
	outlen = crypto_cursor_seglen(&cc_out);
	while (resid >= CHACHA_BLK_SIZE) {
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);

		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;

		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segbase(&cc_out);
			outlen = crypto_cursor_seglen(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			inseg = crypto_cursor_segbase(&cc_in);
			inlen = crypto_cursor_seglen(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	if (resid > 0) {
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);
	}

	error = 0;
out:
	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (error);
}