// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

struct aegis128_ops {
	int (*skcipher_walk_init)(struct skcipher_walk *walk,
				  struct aead_request *req, bool atomic);

	void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
			    const u8 *src, unsigned int size);
};

static bool have_simd;

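/*
 * The two initialization constants from the AEGIS specification. Their byte
 * values are the Fibonacci sequence modulo 256 (0x00, 0x01, 0x01, 0x02,
 * 0x03, 0x05, 0x08, 0x0d, ...).
 */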
static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

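/*
 * True if the optional SIMD implementation was detected at module load and
 * the SIMD unit is usable in the current context (see crypto_simd_usable()).
 */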
static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (have_simd)
		return crypto_simd_usable();
#endif
	return false;
}

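/* Provided by the optional arch-specific SIMD implementation. */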
bool crypto_aegis128_have_simd(void);
void crypto_aegis128_update_simd(struct aegis_state *state, const void *msg);
void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);

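/*
 * The core AEGIS state update: each of the five blocks is replaced by one
 * AES encryption round applied to its predecessor, using the block's old
 * value as the round key; block 0 chains from the saved copy of block 4.
 */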
static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

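/*
 * Absorb one message block: update the state, then XOR the message into
 * block 0. The _a variant requires an AEGIS_BLOCK_SIZE-aligned message,
 * the _u variant handles unaligned input.
 */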
static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg)
{
	if (aegis128_do_simd()) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg)
{
	if (aegis128_do_simd()) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

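/*
 * Initialize the state from the key and nonce as in the specification:
 * S0 = key ^ IV, S1 = const1, S2 = const0, S3 = key ^ const0,
 * S4 = key ^ const1, followed by ten updates alternately absorbing the
 * key and key ^ IV.
 */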
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key);
		crypto_aegis128_update_a(state, &key_iv);
	}
}

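/*
 * Absorb full blocks of associated data, taking the aligned fast path when
 * possible. Trailing partial blocks are buffered by the caller.
 */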
static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

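/*
 * Encrypt a contiguous chunk: for each block, derive the keystream
 * z = S1 ^ S4 ^ (S2 & S3), XOR it into the plaintext and absorb the
 * plaintext block into the state. A trailing partial block is zero-padded
 * so that the state update always sees a full block.
 */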
static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}

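/*
 * Decrypt a contiguous chunk: XOR the same keystream into the ciphertext
 * and absorb the recovered plaintext into the state. For a trailing
 * partial block, the bytes beyond the message end are explicitly zeroed
 * before the update so that stray keystream bytes never enter the state.
 */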
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg);

		memcpy(dst, msg.bytes, size);
	}
}

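/*
 * Feed the associated data scatterlist into the state: full runs go
 * through crypto_aegis128_ad(), partial blocks are collected in a stack
 * buffer, and the final partial block is zero-padded before absorption.
 */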
static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf);
	}
}

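/*
 * Walk the message via the skcipher walk API and hand each mapped span to
 * the chunk handler, rounding down to the walk stride on all but the last
 * step.
 */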
static void crypto_aegis128_process_crypt(struct aegis_state *state,
					  struct aead_request *req,
					  const struct aegis128_ops *ops)
{
	struct skcipher_walk walk;

	ops->skcipher_walk_init(&walk, req, false);

	while (walk.nbytes) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
				 nbytes);

		skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
}

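/*
 * Finalization: XOR the associated data and message lengths (in bits, as
 * little-endian 64-bit words) with S3, absorb that block seven times, then
 * XOR all five state blocks into *tag_xor. Encryption passes a zero block
 * (yielding the tag); decryption passes the received tag (yielding zero on
 * success).
 */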
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

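/* Only the fixed 128-bit key size is accepted. */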
static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

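/*
 * Common encrypt/decrypt path: initialize the state, absorb the associated
 * data, transform the message and compute the final tag.
 */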
static void crypto_aegis128_crypt(struct aead_request *req,
				  union aegis_block *tag_xor,
				  unsigned int cryptlen,
				  const struct aegis128_ops *ops)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct aegis_state state;

	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen);
	crypto_aegis128_process_crypt(&state, req, ops);
	crypto_aegis128_final(&state, tag_xor, req->assoclen, cryptlen);
}

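/*
 * Encryption entry point: run the common path with a zeroed tag block and
 * append the resulting tag after the ciphertext. The scalar chunk handler
 * is swapped for its SIMD counterpart when usable.
 */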
static int crypto_aegis128_encrypt(struct aead_request *req)
{
	const struct aegis128_ops *ops = &(struct aegis128_ops){
		.skcipher_walk_init = skcipher_walk_aead_encrypt,
		.crypt_chunk = crypto_aegis128_encrypt_chunk,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen;

	if (aegis128_do_simd())
		ops = &(struct aegis128_ops){
			.skcipher_walk_init = skcipher_walk_aead_encrypt,
			.crypt_chunk = crypto_aegis128_encrypt_chunk_simd };

	crypto_aegis128_crypt(req, &tag, cryptlen, ops);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

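/*
 * Decryption entry point: read the received tag from the source, let
 * crypto_aegis128_final() XOR the computed tag into it, and accept the
 * message only if the result is all-zero (compared in constant time via
 * crypto_memneq()).
 */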
static int crypto_aegis128_decrypt(struct aead_request *req)
{
	const struct aegis128_ops *ops = &(struct aegis128_ops){
		.skcipher_walk_init = skcipher_walk_aead_decrypt,
		.crypt_chunk = crypto_aegis128_decrypt_chunk,
	};
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	if (aegis128_do_simd())
		ops = &(struct aegis128_ops){
			.skcipher_walk_init = skcipher_walk_aead_decrypt,
			.crypt_chunk = crypto_aegis128_decrypt_chunk_simd };

	crypto_aegis128_crypt(req, &tag, cryptlen, ops);

	return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
}

static struct aead_alg crypto_aegis128_alg = {
	.setkey = crypto_aegis128_setkey,
	.setauthsize = crypto_aegis128_setauthsize,
	.encrypt = crypto_aegis128_encrypt,
	.decrypt = crypto_aegis128_decrypt,

	.ivsize = AEGIS128_NONCE_SIZE,
	.maxauthsize = AEGIS128_MAX_AUTH_SIZE,
	.chunksize = AEGIS_BLOCK_SIZE,

	.base = {
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aegis_ctx),
		.cra_alignmask = 0,

		.cra_priority = 100,

		.cra_name = "aegis128",
		.cra_driver_name = "aegis128-generic",

		.cra_module = THIS_MODULE,
	}
};

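/*
 * Illustrative usage sketch (an assumption for documentation purposes, not
 * part of this file's logic): a kernel consumer would reach this algorithm
 * through the generic AEAD API, roughly as below. Error handling and
 * scatterlist setup are omitted; the source buffer is assumed to hold the
 * associated data followed by the message, with the tag appended to the
 * destination on encryption.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis128", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, AEGIS128_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS128_MAX_AUTH_SIZE);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, sg, sg, cryptlen, iv);
 *	crypto_aead_encrypt(req);
 */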
static int __init crypto_aegis128_module_init(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD))
		have_simd = crypto_aegis128_have_simd();

	return crypto_register_aead(&crypto_aegis128_alg);
}

static void __exit crypto_aegis128_module_exit(void)
{
	crypto_unregister_aead(&crypto_aegis128_alg);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");