xref: /freebsd/sys/crypto/openssl/ossl_chacha20.c (revision 78991a93eb9dd3074a3fc19b88a7c3e34e1ec703)
1 /*-
2  * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
3  *
4  * Copyright (c) 2020 Netflix, Inc
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  * 1. Redistributions of source code must retain the above copyright
10  *    notice, this list of conditions and the following disclaimer,
11  *    without modification.
12  * 2. Redistributions in binary form must reproduce at minimum a disclaimer
13  *    similar to the "NO WARRANTY" disclaimer below ("Disclaimer") and any
14  *    redistribution must be conditioned upon including a substantially
15  *    similar Disclaimer requirement for further binary redistribution.
16  *
17  * NO WARRANTY
18  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20  * LIMITED TO, THE IMPLIED WARRANTIES OF NONINFRINGEMENT, MERCHANTIBILITY
21  * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
22  * THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY,
23  * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
26  * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
28  * THE POSSIBILITY OF SUCH DAMAGES.
29  */
30 
31 #include <sys/types.h>
32 #include <sys/endian.h>
33 #include <sys/malloc.h>
34 #include <sys/time.h>
35 
36 #include <opencrypto/cryptodev.h>
37 
38 #include <crypto/openssl/ossl.h>
39 #include <crypto/openssl/ossl_chacha.h>
40 #include <crypto/openssl/ossl_poly1305.h>
41 
/*
 * Plain ChaCha20 (no authentication) for a single crypto request.
 * The payload is read through a crypto buffer cursor and the result is
 * written either in place or to the request's separate output buffer.
 * Always returns 0.
 */
int
ossl_chacha20(struct cryptop *crp, const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	unsigned char block[CHACHA_BLK_SIZE];
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	/* A key supplied with the request overrides the session key. */
	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	/* ChaCha20_ctr32() takes the key as host-endian 32-bit words. */
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
	/*
	 * The request IV supplies the full counter block (counter word
	 * plus nonce), stored little-endian on the wire.
	 */
	crypto_read_iv(crp, counter);
	for (i = 0; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segbase(&cc_in);
	inlen = crypto_cursor_seglen(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;	/* In-place: output cursor shadows input. */
	outseg = crypto_cursor_segbase(&cc_out);
	outlen = crypto_cursor_seglen(&cc_out);
	while (resid >= CHACHA_BLK_SIZE) {
		/*
		 * If a full block straddles a segment boundary, bounce
		 * it through 'block'.  Note that when either side uses
		 * the bounce buffer, 'todo' below works out to exactly
		 * one block.
		 */
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(inlen, outlen), CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);

		/* Carry the 32-bit block counter into the next word. */
		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;

		if (out == block) {
			/* Flush the bounce buffer and refetch the segment. */
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segbase(&cc_out);
			outlen = crypto_cursor_seglen(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			/* copydata already advanced cc_in; just refetch. */
			inseg = crypto_cursor_segbase(&cc_in);
			inlen = crypto_cursor_seglen(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	/* Handle a final partial block via the zero-padded bounce buffer. */
	if (resid > 0) {
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);
	}

	/* Scrub key material and keystream remnants from the stack. */
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}
143 
/*
 * ChaCha20-Poly1305 AEAD encryption (RFC 8439 construction) for a
 * single crypto request: block 0 of the keystream keys Poly1305, the
 * AAD and ciphertext are MACed with zero padding to 16-byte multiples,
 * followed by the encoded AAD/payload lengths, and the resulting tag
 * is appended at crp_digest_start.  Always returns 0.
 */
int
ossl_chacha20_poly1305_encrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
	unsigned char tag[POLY1305_HASH_LEN];
	POLY1305 auth_ctx;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	/* A key supplied with the request overrides the session key. */
	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	/* ChaCha20_ctr32() takes the key as host-endian 32-bit words. */
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

	/* counter[1..3] hold the 96-bit nonce from the request IV. */
	crypto_read_iv(crp, counter + 1);
	for (i = 1; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	/* Block 0 is used to generate the poly1305 key. */
	counter[0] = 0;

	memset(block, 0, sizeof(block));
	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
	Poly1305_Init(&auth_ctx, block);

	/* MAC the AAD. */
	if (crp->crp_aad != NULL)
		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
	else
		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_aad_length % 16 != 0) {
		/* padding1 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_aad_length % 16);
	}

	/* Encryption starts with block 1. */
	counter[0] = 1;

	/* Do encryption with MAC */
	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segbase(&cc_in);
	inlen = crypto_cursor_seglen(&cc_in);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;	/* In-place: output cursor shadows input. */
	outseg = crypto_cursor_segbase(&cc_out);
	outlen = crypto_cursor_seglen(&cc_out);
	while (resid >= CHACHA_BLK_SIZE) {
		/*
		 * Bounce a block through 'block' when it straddles a
		 * segment boundary; 'todo' is then exactly one block.
		 */
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(inlen, outlen), CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);
		/* MAC the ciphertext just produced. */
		Poly1305_Update(&auth_ctx, out, todo);

		/* Carry the 32-bit block counter into the next word. */
		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;

		if (out == block) {
			/* Flush the bounce buffer and refetch the segment. */
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segbase(&cc_out);
			outlen = crypto_cursor_seglen(&cc_out);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			/* copydata already advanced cc_in; just refetch. */
			inseg = crypto_cursor_segbase(&cc_in);
			inlen = crypto_cursor_seglen(&cc_in);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	/* Final partial block: encrypt and MAC via the bounce buffer. */
	if (resid > 0) {
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);

		/* padding2 */
		todo = roundup2(resid, 16);
		memset(block + resid, 0, todo - resid);
		Poly1305_Update(&auth_ctx, block, todo);
	}

	/* lengths */
	le64enc(block, crp->crp_aad_length);
	le64enc(block + 8, crp->crp_payload_length);
	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

	/* Emit the (possibly truncated) tag after the ciphertext. */
	Poly1305_Final(&auth_ctx, tag);
	crypto_copyback(crp, crp->crp_digest_start, csp->csp_auth_mlen == 0 ?
	    POLY1305_HASH_LEN : csp->csp_auth_mlen, tag);

	/* Scrub key material, MAC state, and keystream remnants. */
	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}
290 
291 
292 int
293 ossl_chacha20_poly1305_decrypt(struct cryptop *crp,
294     const struct crypto_session_params *csp)
295 {
296 	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
297 	unsigned int counter[CHACHA_CTR_SIZE / 4];
298 	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
299 	unsigned char tag[POLY1305_HASH_LEN], tag2[POLY1305_HASH_LEN];
300 	struct poly1305_context auth_ctx;
301 	struct crypto_buffer_cursor cc_in, cc_out;
302 	const unsigned char *in, *inseg, *cipher_key;
303 	unsigned char *out, *outseg;
304 	size_t resid, todo, inlen, outlen;
305 	uint32_t next_counter;
306 	int error;
307 	u_int i, mlen;
308 
309 	if (crp->crp_cipher_key != NULL)
310 		cipher_key = crp->crp_cipher_key;
311 	else
312 		cipher_key = csp->csp_cipher_key;
313 	for (i = 0; i < nitems(key); i++)
314 		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
315 
316 	crypto_read_iv(crp, counter + 1);
317 	for (i = 1; i < nitems(counter); i++)
318 		counter[i] = le32toh(counter[i]);
319 
320 	/* Block 0 is used to generate the poly1305 key. */
321 	counter[0] = 0;
322 
323 	memset(block, 0, sizeof(block));
324 	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
325 	Poly1305_Init(&auth_ctx, block);
326 
327 	/* MAC the AAD. */
328 	if (crp->crp_aad != NULL)
329 		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
330 	else
331 		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
332 		    ossl_poly1305_update, &auth_ctx);
333 	if (crp->crp_aad_length % 16 != 0) {
334 		/* padding1 */
335 		memset(block, 0, 16);
336 		Poly1305_Update(&auth_ctx, block,
337 		    16 - crp->crp_aad_length % 16);
338 	}
339 
340 	/* Mac the ciphertext. */
341 	crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
342 	    ossl_poly1305_update, &auth_ctx);
343 	if (crp->crp_payload_length % 16 != 0) {
344 		/* padding2 */
345 		memset(block, 0, 16);
346 		Poly1305_Update(&auth_ctx, block,
347 		    16 - crp->crp_payload_length % 16);
348 	}
349 
350 	/* lengths */
351 	le64enc(block, crp->crp_aad_length);
352 	le64enc(block + 8, crp->crp_payload_length);
353 	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);
354 
355 	Poly1305_Final(&auth_ctx, tag);
356 	mlen = csp->csp_auth_mlen == 0 ? POLY1305_HASH_LEN : csp->csp_auth_mlen;
357 	crypto_copydata(crp, crp->crp_digest_start, mlen, tag2);
358 	if (timingsafe_bcmp(tag, tag2, mlen) != 0) {
359 		error = EBADMSG;
360 		goto out;
361 	}
362 
363 	/* Decryption starts with block 1. */
364 	counter[0] = 1;
365 
366 	resid = crp->crp_payload_length;
367 	crypto_cursor_init(&cc_in, &crp->crp_buf);
368 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
369 	inseg = crypto_cursor_segbase(&cc_in);
370 	inlen = crypto_cursor_seglen(&cc_in);
371 	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
372 		crypto_cursor_init(&cc_out, &crp->crp_obuf);
373 		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
374 	} else
375 		cc_out = cc_in;
376 	outseg = crypto_cursor_segbase(&cc_out);
377 	outlen = crypto_cursor_seglen(&cc_out);
378 	while (resid >= CHACHA_BLK_SIZE) {
379 		if (inlen < CHACHA_BLK_SIZE) {
380 			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
381 			in = block;
382 			inlen = CHACHA_BLK_SIZE;
383 		} else
384 			in = inseg;
385 		if (outlen < CHACHA_BLK_SIZE) {
386 			out = block;
387 			outlen = CHACHA_BLK_SIZE;
388 		} else
389 			out = outseg;
390 
391 		/* Figure out how many blocks we can encrypt/decrypt at once. */
392 		todo = rounddown(MIN(inlen, outlen), CHACHA_BLK_SIZE);
393 
394 #ifdef __LP64__
395 		/* ChaCha20_ctr32() assumes length is <= 4GB. */
396 		todo = (uint32_t)todo;
397 #endif
398 
399 		/* Truncate if the 32-bit counter would roll over. */
400 		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
401 		if (next_counter < counter[0]) {
402 			todo -= next_counter * CHACHA_BLK_SIZE;
403 			next_counter = 0;
404 		}
405 
406 		ChaCha20_ctr32(out, in, todo, key, counter);
407 
408 		counter[0] = next_counter;
409 		if (counter[0] == 0)
410 			counter[1]++;
411 
412 		if (out == block) {
413 			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
414 			outseg = crypto_cursor_segbase(&cc_out);
415 			outlen = crypto_cursor_seglen(&cc_out);
416 		} else {
417 			crypto_cursor_advance(&cc_out, todo);
418 			outseg += todo;
419 			outlen -= todo;
420 		}
421 		if (in == block) {
422 			inseg = crypto_cursor_segbase(&cc_in);
423 			inlen = crypto_cursor_seglen(&cc_in);
424 		} else {
425 			crypto_cursor_advance(&cc_in, todo);
426 			inseg += todo;
427 			inlen -= todo;
428 		}
429 		resid -= todo;
430 	}
431 
432 	if (resid > 0) {
433 		memset(block, 0, sizeof(block));
434 		crypto_cursor_copydata(&cc_in, resid, block);
435 		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
436 		crypto_cursor_copyback(&cc_out, resid, block);
437 	}
438 
439 	error = 0;
440 out:
441 	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
442 	explicit_bzero(tag, sizeof(tag));
443 	explicit_bzero(block, sizeof(block));
444 	explicit_bzero(counter, sizeof(counter));
445 	explicit_bzero(key, sizeof(key));
446 	return (error);
447 }
448