xref: /freebsd/sys/crypto/openssl/ossl_chacha20.c (revision a3266ba2697a383d2ede56803320d941866c7e76)
1 /*-
2  * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
3  *
4  * Copyright (c) 2020 Netflix, Inc
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  * 1. Redistributions of source code must retain the above copyright
10  *    notice, this list of conditions and the following disclaimer,
11  *    without modification.
12  * 2. Redistributions in binary form must reproduce at minimum a disclaimer
13  *    similar to the "NO WARRANTY" disclaimer below ("Disclaimer") and any
14  *    redistribution must be conditioned upon including a substantially
15  *    similar Disclaimer requirement for further binary redistribution.
16  *
17  * NO WARRANTY
18  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20  * LIMITED TO, THE IMPLIED WARRANTIES OF NONINFRINGEMENT, MERCHANTIBILITY
21  * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
22  * THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY,
23  * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
26  * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
28  * THE POSSIBILITY OF SUCH DAMAGES.
29  */
30 
31 #include <sys/types.h>
32 #include <sys/endian.h>
33 #include <sys/malloc.h>
34 #include <sys/time.h>
35 
36 #include <opencrypto/cryptodev.h>
37 
38 #include <crypto/openssl/ossl.h>
39 #include <crypto/openssl/ossl_chacha.h>
40 #include <crypto/openssl/ossl_poly1305.h>
41 
/*
 * Perform plain ChaCha20 encryption/decryption of a crypto(9) request's
 * payload.  The per-request key (crp_cipher_key), if set, overrides the
 * session key.  The request IV supplies the full 128-bit initial state
 * tail (counter word 0 plus 96-bit nonce) as little-endian 32-bit words.
 *
 * Always returns 0; the stream cipher itself cannot fail.
 */
int
ossl_chacha20(struct cryptop *crp, const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	unsigned char block[CHACHA_BLK_SIZE];	/* bounce buffer for split blocks */
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	/* Prefer the per-request key over the session key. */
	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	/* Expand the key bytes into host-endian 32-bit words. */
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
	/* The IV is the whole counter block, stored little-endian. */
	crypto_read_iv(crp, counter);
	for (i = 0; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	/* Walk input and output with cursors; output may alias input. */
	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segment(&cc_in, &inlen);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outseg = crypto_cursor_segment(&cc_out, &outlen);
	while (resid >= CHACHA_BLK_SIZE) {
		/*
		 * If a cipher block straddles a segment boundary, bounce it
		 * through 'block'; copying through the cursor also advances
		 * the input cursor past the block.
		 */
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			/* next_counter is the number of overflowed blocks. */
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		ChaCha20_ctr32(out, in, todo, key, counter);

		/* Carry into the next counter word on wrap. */
		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;

		/*
		 * A bounced block was at most one block, and the copy
		 * through the cursor already consumed it; otherwise advance
		 * both the cursor and the cached segment view by 'todo'.
		 */
		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segment(&cc_out, &outlen);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			inseg = crypto_cursor_segment(&cc_in, &inlen);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	/* Handle a final partial block via the (zero-padded) bounce buffer. */
	if (resid > 0) {
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);
	}

	/* Zeroize key material and plaintext-bearing temporaries. */
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}
140 
/*
 * Perform ChaCha20-Poly1305 AEAD encryption of a crypto(9) request
 * following the standard construction (cf. RFC 8439): keystream block 0
 * generates the one-time Poly1305 key, the payload is encrypted starting
 * at block 1, and the MAC covers AAD || pad || ciphertext || pad ||
 * le64(aad_len) || le64(payload_len).  The resulting tag (possibly
 * truncated to csp_auth_mlen) is written at crp_digest_start.
 *
 * Always returns 0.
 */
int
ossl_chacha20_poly1305_encrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
	unsigned int counter[CHACHA_CTR_SIZE / 4];
	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
	unsigned char tag[POLY1305_HASH_LEN];
	POLY1305 auth_ctx;
	struct crypto_buffer_cursor cc_in, cc_out;
	const unsigned char *in, *inseg, *cipher_key;
	unsigned char *out, *outseg;
	size_t resid, todo, inlen, outlen;
	uint32_t next_counter;
	u_int i;

	/* Prefer the per-request key over the session key. */
	if (crp->crp_cipher_key != NULL)
		cipher_key = crp->crp_cipher_key;
	else
		cipher_key = csp->csp_cipher_key;
	for (i = 0; i < nitems(key); i++)
		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

	/* The IV supplies the nonce: counter words 1-3, little-endian. */
	crypto_read_iv(crp, counter + 1);
	for (i = 1; i < nitems(counter); i++)
		counter[i] = le32toh(counter[i]);

	/* Block 0 is used to generate the poly1305 key. */
	counter[0] = 0;

	memset(block, 0, sizeof(block));
	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
	Poly1305_Init(&auth_ctx, block);

	/* MAC the AAD. */
	if (crp->crp_aad != NULL)
		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
	else
		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
		    ossl_poly1305_update, &auth_ctx);
	if (crp->crp_aad_length % 16 != 0) {
		/* padding1 */
		memset(block, 0, 16);
		Poly1305_Update(&auth_ctx, block,
		    16 - crp->crp_aad_length % 16);
	}

	/* Encryption starts with block 1. */
	counter[0] = 1;

	/* Do encryption with MAC */
	resid = crp->crp_payload_length;
	crypto_cursor_init(&cc_in, &crp->crp_buf);
	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
	inseg = crypto_cursor_segment(&cc_in, &inlen);
	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		crypto_cursor_init(&cc_out, &crp->crp_obuf);
		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
	} else
		cc_out = cc_in;
	outseg = crypto_cursor_segment(&cc_out, &outlen);
	while (resid >= CHACHA_BLK_SIZE) {
		/*
		 * Bounce a block through 'block' when it straddles a
		 * segment boundary; copydata also advances the input cursor.
		 */
		if (inlen < CHACHA_BLK_SIZE) {
			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
			in = block;
			inlen = CHACHA_BLK_SIZE;
		} else
			in = inseg;
		if (outlen < CHACHA_BLK_SIZE) {
			out = block;
			outlen = CHACHA_BLK_SIZE;
		} else
			out = outseg;

		/* Figure out how many blocks we can encrypt/decrypt at once. */
		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
		    CHACHA_BLK_SIZE);

#ifdef __LP64__
		/* ChaCha20_ctr32() assumes length is <= 4GB. */
		todo = (uint32_t)todo;
#endif

		/* Truncate if the 32-bit counter would roll over. */
		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
		if (next_counter < counter[0]) {
			/* next_counter is the number of overflowed blocks. */
			todo -= next_counter * CHACHA_BLK_SIZE;
			next_counter = 0;
		}

		/* Encrypt, then MAC the freshly produced ciphertext. */
		ChaCha20_ctr32(out, in, todo, key, counter);
		Poly1305_Update(&auth_ctx, out, todo);

		/* Carry into the next counter word on wrap. */
		counter[0] = next_counter;
		if (counter[0] == 0)
			counter[1]++;

		if (out == block) {
			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
			outseg = crypto_cursor_segment(&cc_out, &outlen);
		} else {
			crypto_cursor_advance(&cc_out, todo);
			outseg += todo;
			outlen -= todo;
		}
		if (in == block) {
			inseg = crypto_cursor_segment(&cc_in, &inlen);
		} else {
			crypto_cursor_advance(&cc_in, todo);
			inseg += todo;
			inlen -= todo;
		}
		resid -= todo;
	}

	/* Final partial block: pad, encrypt in place, MAC padded length. */
	if (resid > 0) {
		memset(block, 0, sizeof(block));
		crypto_cursor_copydata(&cc_in, resid, block);
		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
		crypto_cursor_copyback(&cc_out, resid, block);

		/* padding2 */
		todo = roundup2(resid, 16);
		memset(block + resid, 0, todo - resid);
		Poly1305_Update(&auth_ctx, block, todo);
	}

	/* lengths */
	le64enc(block, crp->crp_aad_length);
	le64enc(block + 8, crp->crp_payload_length);
	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

	/* Emit the (possibly truncated) authentication tag. */
	Poly1305_Final(&auth_ctx, tag);
	crypto_copyback(crp, crp->crp_digest_start, csp->csp_auth_mlen == 0 ?
	    POLY1305_HASH_LEN : csp->csp_auth_mlen, tag);

	/* Zeroize key material and intermediate secrets. */
	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
	explicit_bzero(tag, sizeof(tag));
	explicit_bzero(block, sizeof(block));
	explicit_bzero(counter, sizeof(counter));
	explicit_bzero(key, sizeof(key));
	return (0);
}
284 
285 
286 int
287 ossl_chacha20_poly1305_decrypt(struct cryptop *crp,
288     const struct crypto_session_params *csp)
289 {
290 	_Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
291 	unsigned int counter[CHACHA_CTR_SIZE / 4];
292 	_Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
293 	unsigned char tag[POLY1305_HASH_LEN], tag2[POLY1305_HASH_LEN];
294 	struct poly1305_context auth_ctx;
295 	struct crypto_buffer_cursor cc_in, cc_out;
296 	const unsigned char *in, *inseg, *cipher_key;
297 	unsigned char *out, *outseg;
298 	size_t resid, todo, inlen, outlen;
299 	uint32_t next_counter;
300 	int error;
301 	u_int i, mlen;
302 
303 	if (crp->crp_cipher_key != NULL)
304 		cipher_key = crp->crp_cipher_key;
305 	else
306 		cipher_key = csp->csp_cipher_key;
307 	for (i = 0; i < nitems(key); i++)
308 		key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
309 
310 	crypto_read_iv(crp, counter + 1);
311 	for (i = 1; i < nitems(counter); i++)
312 		counter[i] = le32toh(counter[i]);
313 
314 	/* Block 0 is used to generate the poly1305 key. */
315 	counter[0] = 0;
316 
317 	memset(block, 0, sizeof(block));
318 	ChaCha20_ctr32(block, block, sizeof(block), key, counter);
319 	Poly1305_Init(&auth_ctx, block);
320 
321 	/* MAC the AAD. */
322 	if (crp->crp_aad != NULL)
323 		Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
324 	else
325 		crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
326 		    ossl_poly1305_update, &auth_ctx);
327 	if (crp->crp_aad_length % 16 != 0) {
328 		/* padding1 */
329 		memset(block, 0, 16);
330 		Poly1305_Update(&auth_ctx, block,
331 		    16 - crp->crp_aad_length % 16);
332 	}
333 
334 	/* Mac the ciphertext. */
335 	crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
336 	    ossl_poly1305_update, &auth_ctx);
337 	if (crp->crp_payload_length % 16 != 0) {
338 		/* padding2 */
339 		memset(block, 0, 16);
340 		Poly1305_Update(&auth_ctx, block,
341 		    16 - crp->crp_payload_length % 16);
342 	}
343 
344 	/* lengths */
345 	le64enc(block, crp->crp_aad_length);
346 	le64enc(block + 8, crp->crp_payload_length);
347 	Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);
348 
349 	Poly1305_Final(&auth_ctx, tag);
350 	mlen = csp->csp_auth_mlen == 0 ? POLY1305_HASH_LEN : csp->csp_auth_mlen;
351 	crypto_copydata(crp, crp->crp_digest_start, mlen, tag2);
352 	if (timingsafe_bcmp(tag, tag2, mlen) != 0) {
353 		error = EBADMSG;
354 		goto out;
355 	}
356 
357 	/* Decryption starts with block 1. */
358 	counter[0] = 1;
359 
360 	resid = crp->crp_payload_length;
361 	crypto_cursor_init(&cc_in, &crp->crp_buf);
362 	crypto_cursor_advance(&cc_in, crp->crp_payload_start);
363 	inseg = crypto_cursor_segment(&cc_in, &inlen);
364 	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
365 		crypto_cursor_init(&cc_out, &crp->crp_obuf);
366 		crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
367 	} else
368 		cc_out = cc_in;
369 	outseg = crypto_cursor_segment(&cc_out, &outlen);
370 	while (resid >= CHACHA_BLK_SIZE) {
371 		if (inlen < CHACHA_BLK_SIZE) {
372 			crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
373 			in = block;
374 			inlen = CHACHA_BLK_SIZE;
375 		} else
376 			in = inseg;
377 		if (outlen < CHACHA_BLK_SIZE) {
378 			out = block;
379 			outlen = CHACHA_BLK_SIZE;
380 		} else
381 			out = outseg;
382 
383 		/* Figure out how many blocks we can encrypt/decrypt at once. */
384 		todo = rounddown(MIN(resid, MIN(inlen, outlen)),
385 		    CHACHA_BLK_SIZE);
386 
387 #ifdef __LP64__
388 		/* ChaCha20_ctr32() assumes length is <= 4GB. */
389 		todo = (uint32_t)todo;
390 #endif
391 
392 		/* Truncate if the 32-bit counter would roll over. */
393 		next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
394 		if (next_counter < counter[0]) {
395 			todo -= next_counter * CHACHA_BLK_SIZE;
396 			next_counter = 0;
397 		}
398 
399 		ChaCha20_ctr32(out, in, todo, key, counter);
400 
401 		counter[0] = next_counter;
402 		if (counter[0] == 0)
403 			counter[1]++;
404 
405 		if (out == block) {
406 			crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
407 			outseg = crypto_cursor_segment(&cc_out, &outlen);
408 		} else {
409 			crypto_cursor_advance(&cc_out, todo);
410 			outseg += todo;
411 			outlen -= todo;
412 		}
413 		if (in == block) {
414 			inseg = crypto_cursor_segment(&cc_in, &inlen);
415 		} else {
416 			crypto_cursor_advance(&cc_in, todo);
417 			inseg += todo;
418 			inlen -= todo;
419 		}
420 		resid -= todo;
421 	}
422 
423 	if (resid > 0) {
424 		memset(block, 0, sizeof(block));
425 		crypto_cursor_copydata(&cc_in, resid, block);
426 		ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
427 		crypto_cursor_copyback(&cc_out, resid, block);
428 	}
429 
430 	error = 0;
431 out:
432 	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
433 	explicit_bzero(tag, sizeof(tag));
434 	explicit_bzero(block, sizeof(block));
435 	explicit_bzero(counter, sizeof(counter));
436 	explicit_bzero(key, sizeof(key));
437 	return (error);
438 }
439