/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2020 Netflix, Inc
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer,
 *    without modification.
 * 2. Redistributions in binary form must reproduce at minimum a disclaimer
 *    similar to the "NO WARRANTY" disclaimer below ("Disclaimer") and any
 *    redistribution must be conditioned upon including a substantially
 *    similar Disclaimer requirement for further binary redistribution.
 *
 * NO WARRANTY
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF NONINFRINGEMENT, MERCHANTIBILITY
 * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR SPECIAL, EXEMPLARY,
 * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
 * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGES.
 */

#include <sys/types.h>
#include <sys/endian.h>
#include <sys/malloc.h>
#include <sys/time.h>

#include <opencrypto/cryptodev.h>

#include <crypto/openssl/ossl.h>
#include <crypto/openssl/ossl_chacha.h>
#include <crypto/openssl/ossl_cipher.h>
#include <crypto/openssl/ossl_poly1305.h>

static ossl_cipher_process_t ossl_chacha20;

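/*
 * Descriptor wiring the OpenSSL ChaCha20 implementation into the ossl(4)
 * driver.  ChaCha20 keeps no precomputed key schedule in the session, so
 * no set_encrypt_key/set_decrypt_key hooks are needed; the raw key is
 * expanded on each request in ossl_chacha20() below.
 */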
struct ossl_cipher ossl_cipher_chacha20 = {
        .type = CRYPTO_CHACHA20,
        .blocksize = CHACHA_BLK_SIZE,
        .ivsize = CHACHA_CTR_SIZE,

        .set_encrypt_key = NULL,
        .set_decrypt_key = NULL,
        .process = ossl_chacha20
};

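/*
 * Plain ChaCha20 (CRYPTO_CHACHA20).  The 16-byte IV supplies the initial
 * 32-bit block counter followed by the 96-bit nonce, as little-endian
 * words, and the payload is processed with OpenSSL's ChaCha20_ctr32() in
 * the largest contiguous chunks the buffer cursors allow.
 */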
static int
ossl_chacha20(struct ossl_session_cipher *s, struct cryptop *crp,
    const struct crypto_session_params *csp)
{
        _Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
        unsigned int counter[CHACHA_CTR_SIZE / 4];
        unsigned char block[CHACHA_BLK_SIZE];
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *in, *inseg, *cipher_key;
        unsigned char *out, *outseg;
        size_t resid, todo, inlen, outlen;
        uint32_t next_counter;
        u_int i;

        if (crp->crp_cipher_key != NULL)
                cipher_key = crp->crp_cipher_key;
        else
                cipher_key = csp->csp_cipher_key;
        for (i = 0; i < nitems(key); i++)
                key[i] = CHACHA_U8TOU32(cipher_key + i * 4);
        crypto_read_iv(crp, counter);
        for (i = 0; i < nitems(counter); i++)
                counter[i] = le32toh(counter[i]);

        resid = crp->crp_payload_length;
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        inseg = crypto_cursor_segment(&cc_in, &inlen);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        outseg = crypto_cursor_segment(&cc_out, &outlen);
        while (resid >= CHACHA_BLK_SIZE) {
                if (inlen < CHACHA_BLK_SIZE) {
                        crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
                        in = block;
                        inlen = CHACHA_BLK_SIZE;
                } else
                        in = inseg;
                if (outlen < CHACHA_BLK_SIZE) {
                        out = block;
                        outlen = CHACHA_BLK_SIZE;
                } else
                        out = outseg;

                /* Figure out how many blocks we can encrypt/decrypt at once. */
                todo = rounddown(MIN(resid, MIN(inlen, outlen)),
                    CHACHA_BLK_SIZE);

#ifdef __LP64__
                /* ChaCha20_ctr32() assumes length is <= 4GB. */
                todo = (uint32_t)todo;
#endif

                /* Truncate if the 32-bit counter would roll over. */
                next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
                if (next_counter < counter[0]) {
                        todo -= next_counter * CHACHA_BLK_SIZE;
                        next_counter = 0;
                }

                ChaCha20_ctr32(out, in, todo, key, counter);

                counter[0] = next_counter;
                if (counter[0] == 0)
                        counter[1]++;

                if (out == block) {
                        crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
                        outseg = crypto_cursor_segment(&cc_out, &outlen);
                } else {
                        crypto_cursor_advance(&cc_out, todo);
                        outseg += todo;
                        outlen -= todo;
                }
                if (in == block) {
                        inseg = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inseg += todo;
                        inlen -= todo;
                }
                resid -= todo;
        }

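        /*
         * Process any trailing partial block through a zero-padded copy
         * on the stack, copying back only the bytes that were requested.
         */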
        if (resid > 0) {
                memset(block, 0, sizeof(block));
                crypto_cursor_copydata(&cc_in, resid, block);
                ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
                crypto_cursor_copyback(&cc_out, resid, block);
        }

        explicit_bzero(block, sizeof(block));
        explicit_bzero(counter, sizeof(counter));
        explicit_bzero(key, sizeof(key));
        return (0);
}

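/*
 * ChaCha20-Poly1305 AEAD encryption (RFC 8439 construction): block 0 of
 * the keystream generates the one-time Poly1305 key, the AAD and the
 * ciphertext are MACed with zero padding to 16-byte boundaries, and a
 * final block holding the AAD and payload lengths completes the tag.
 */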
int
ossl_chacha20_poly1305_encrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
        _Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
        unsigned int counter[CHACHA_CTR_SIZE / 4];
        _Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
        unsigned char tag[POLY1305_HASH_LEN];
        POLY1305 auth_ctx;
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *in, *inseg, *cipher_key;
        unsigned char *out, *outseg;
        size_t resid, todo, inlen, outlen;
        uint32_t next_counter;
        u_int i;

        if (crp->crp_cipher_key != NULL)
                cipher_key = crp->crp_cipher_key;
        else
                cipher_key = csp->csp_cipher_key;
        for (i = 0; i < nitems(key); i++)
                key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

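        /*
         * The nonce is loaded into the tail of the counter block: an
         * 8-byte nonce leaves counter[0] and counter[1] as a 64-bit block
         * counter, while a 12-byte nonce leaves only counter[0].
         */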
        memset(counter, 0, sizeof(counter));
        crypto_read_iv(crp, counter + (CHACHA_CTR_SIZE - csp->csp_ivlen) / 4);
        for (i = 1; i < nitems(counter); i++)
                counter[i] = le32toh(counter[i]);

        /* Block 0 is used to generate the poly1305 key. */
        counter[0] = 0;

        memset(block, 0, sizeof(block));
        ChaCha20_ctr32(block, block, sizeof(block), key, counter);
        Poly1305_Init(&auth_ctx, block);

        /* MAC the AAD. */
        if (crp->crp_aad != NULL)
                Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
        else
                crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
                    ossl_poly1305_update, &auth_ctx);
        if (crp->crp_aad_length % 16 != 0) {
                /* padding1 */
                memset(block, 0, 16);
                Poly1305_Update(&auth_ctx, block,
                    16 - crp->crp_aad_length % 16);
        }

        /* Encryption starts with block 1. */
        counter[0] = 1;

        /* Do encryption with MAC */
        resid = crp->crp_payload_length;
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        inseg = crypto_cursor_segment(&cc_in, &inlen);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        outseg = crypto_cursor_segment(&cc_out, &outlen);
        while (resid >= CHACHA_BLK_SIZE) {
                if (inlen < CHACHA_BLK_SIZE) {
                        crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
                        in = block;
                        inlen = CHACHA_BLK_SIZE;
                } else
                        in = inseg;
                if (outlen < CHACHA_BLK_SIZE) {
                        out = block;
                        outlen = CHACHA_BLK_SIZE;
                } else
                        out = outseg;

                /* Figure out how many blocks we can encrypt/decrypt at once. */
                todo = rounddown(MIN(resid, MIN(inlen, outlen)),
                    CHACHA_BLK_SIZE);

#ifdef __LP64__
                /* ChaCha20_ctr32() assumes length is <= 4GB. */
                todo = (uint32_t)todo;
#endif

                /* Truncate if the 32-bit counter would roll over. */
                next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
                if (csp->csp_ivlen == 8 && next_counter < counter[0]) {
                        todo -= next_counter * CHACHA_BLK_SIZE;
                        next_counter = 0;
                }

                ChaCha20_ctr32(out, in, todo, key, counter);
                Poly1305_Update(&auth_ctx, out, todo);

                counter[0] = next_counter;
                if (csp->csp_ivlen == 8 && counter[0] == 0)
                        counter[1]++;

                if (out == block) {
                        crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
                        outseg = crypto_cursor_segment(&cc_out, &outlen);
                } else {
                        crypto_cursor_advance(&cc_out, todo);
                        outseg += todo;
                        outlen -= todo;
                }
                if (in == block) {
                        inseg = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inseg += todo;
                        inlen -= todo;
                }
                resid -= todo;
        }

        if (resid > 0) {
                memset(block, 0, sizeof(block));
                crypto_cursor_copydata(&cc_in, resid, block);
                ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
                crypto_cursor_copyback(&cc_out, resid, block);

                /* padding2 */
                todo = roundup2(resid, 16);
                memset(block + resid, 0, todo - resid);
                Poly1305_Update(&auth_ctx, block, todo);
        }

        /* lengths */
        le64enc(block, crp->crp_aad_length);
        le64enc(block + 8, crp->crp_payload_length);
        Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

        Poly1305_Final(&auth_ctx, tag);
        crypto_copyback(crp, crp->crp_digest_start, csp->csp_auth_mlen == 0 ?
            POLY1305_HASH_LEN : csp->csp_auth_mlen, tag);

        explicit_bzero(&auth_ctx, sizeof(auth_ctx));
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(block, sizeof(block));
        explicit_bzero(counter, sizeof(counter));
        explicit_bzero(key, sizeof(key));
        return (0);
}

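/*
 * ChaCha20-Poly1305 AEAD decryption.  The Poly1305 tag is computed over
 * the AAD and ciphertext and verified before any plaintext is produced;
 * decryption then mirrors the encrypt path.
 */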
int
ossl_chacha20_poly1305_decrypt(struct cryptop *crp,
    const struct crypto_session_params *csp)
{
        _Alignas(8) unsigned int key[CHACHA_KEY_SIZE / 4];
        unsigned int counter[CHACHA_CTR_SIZE / 4];
        _Alignas(8) unsigned char block[CHACHA_BLK_SIZE];
        unsigned char tag[POLY1305_HASH_LEN], tag2[POLY1305_HASH_LEN];
        struct poly1305_context auth_ctx;
        struct crypto_buffer_cursor cc_in, cc_out;
        const unsigned char *in, *inseg, *cipher_key;
        unsigned char *out, *outseg;
        size_t resid, todo, inlen, outlen;
        uint32_t next_counter;
        int error;
        u_int i, mlen;

        if (crp->crp_cipher_key != NULL)
                cipher_key = crp->crp_cipher_key;
        else
                cipher_key = csp->csp_cipher_key;
        for (i = 0; i < nitems(key); i++)
                key[i] = CHACHA_U8TOU32(cipher_key + i * 4);

        memset(counter, 0, sizeof(counter));
        crypto_read_iv(crp, counter + (CHACHA_CTR_SIZE - csp->csp_ivlen) / 4);
        for (i = 1; i < nitems(counter); i++)
                counter[i] = le32toh(counter[i]);

        /* Block 0 is used to generate the poly1305 key. */
        counter[0] = 0;

        memset(block, 0, sizeof(block));
        ChaCha20_ctr32(block, block, sizeof(block), key, counter);
        Poly1305_Init(&auth_ctx, block);

        /* MAC the AAD. */
        if (crp->crp_aad != NULL)
                Poly1305_Update(&auth_ctx, crp->crp_aad, crp->crp_aad_length);
        else
                crypto_apply(crp, crp->crp_aad_start, crp->crp_aad_length,
                    ossl_poly1305_update, &auth_ctx);
        if (crp->crp_aad_length % 16 != 0) {
                /* padding1 */
                memset(block, 0, 16);
                Poly1305_Update(&auth_ctx, block,
                    16 - crp->crp_aad_length % 16);
        }

        /* MAC the ciphertext. */
        crypto_apply(crp, crp->crp_payload_start, crp->crp_payload_length,
            ossl_poly1305_update, &auth_ctx);
        if (crp->crp_payload_length % 16 != 0) {
                /* padding2 */
                memset(block, 0, 16);
                Poly1305_Update(&auth_ctx, block,
                    16 - crp->crp_payload_length % 16);
        }

        /* lengths */
        le64enc(block, crp->crp_aad_length);
        le64enc(block + 8, crp->crp_payload_length);
        Poly1305_Update(&auth_ctx, block, sizeof(uint64_t) * 2);

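        /*
         * Compute the expected tag and compare it against the transmitted
         * tag in constant time before releasing any plaintext.
         */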
        Poly1305_Final(&auth_ctx, tag);
        mlen = csp->csp_auth_mlen == 0 ? POLY1305_HASH_LEN : csp->csp_auth_mlen;
        crypto_copydata(crp, crp->crp_digest_start, mlen, tag2);
        if (timingsafe_bcmp(tag, tag2, mlen) != 0) {
                error = EBADMSG;
                goto out;
        }

        /* Decryption starts with block 1. */
        counter[0] = 1;

        resid = crp->crp_payload_length;
        crypto_cursor_init(&cc_in, &crp->crp_buf);
        crypto_cursor_advance(&cc_in, crp->crp_payload_start);
        inseg = crypto_cursor_segment(&cc_in, &inlen);
        if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
                crypto_cursor_init(&cc_out, &crp->crp_obuf);
                crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
        } else
                cc_out = cc_in;
        outseg = crypto_cursor_segment(&cc_out, &outlen);
        while (resid >= CHACHA_BLK_SIZE) {
                if (inlen < CHACHA_BLK_SIZE) {
                        crypto_cursor_copydata(&cc_in, CHACHA_BLK_SIZE, block);
                        in = block;
                        inlen = CHACHA_BLK_SIZE;
                } else
                        in = inseg;
                if (outlen < CHACHA_BLK_SIZE) {
                        out = block;
                        outlen = CHACHA_BLK_SIZE;
                } else
                        out = outseg;

                /* Figure out how many blocks we can encrypt/decrypt at once. */
                todo = rounddown(MIN(resid, MIN(inlen, outlen)),
                    CHACHA_BLK_SIZE);

#ifdef __LP64__
                /* ChaCha20_ctr32() assumes length is <= 4GB. */
                todo = (uint32_t)todo;
#endif

                /* Truncate if the 32-bit counter would roll over. */
                next_counter = counter[0] + todo / CHACHA_BLK_SIZE;
                if (csp->csp_ivlen == 8 && next_counter < counter[0]) {
                        todo -= next_counter * CHACHA_BLK_SIZE;
                        next_counter = 0;
                }

                ChaCha20_ctr32(out, in, todo, key, counter);

                counter[0] = next_counter;
                if (csp->csp_ivlen == 8 && counter[0] == 0)
                        counter[1]++;

                if (out == block) {
                        crypto_cursor_copyback(&cc_out, CHACHA_BLK_SIZE, block);
                        outseg = crypto_cursor_segment(&cc_out, &outlen);
                } else {
                        crypto_cursor_advance(&cc_out, todo);
                        outseg += todo;
                        outlen -= todo;
                }
                if (in == block) {
                        inseg = crypto_cursor_segment(&cc_in, &inlen);
                } else {
                        crypto_cursor_advance(&cc_in, todo);
                        inseg += todo;
                        inlen -= todo;
                }
                resid -= todo;
        }

        if (resid > 0) {
                memset(block, 0, sizeof(block));
                crypto_cursor_copydata(&cc_in, resid, block);
                ChaCha20_ctr32(block, block, CHACHA_BLK_SIZE, key, counter);
                crypto_cursor_copyback(&cc_out, resid, block);
        }

        error = 0;
out:
        explicit_bzero(&auth_ctx, sizeof(auth_ctx));
        explicit_bzero(tag, sizeof(tag));
        explicit_bzero(block, sizeof(block));
        explicit_bzero(counter, sizeof(counter));
        explicit_bzero(key, sizeof(key));
        return (error);
}