xref: /linux/arch/x86/crypto/aegis128-aesni-glue.c (revision e9f0878c4b2004ac19581274c1ae4c61ae3ca70e)
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *   Glue for AES-NI + SSE2 implementation
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/cryptd.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <asm/fpu/api.h>
#include <asm/cpu_device_id.h>

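/*
 * Algorithm parameters from the AEGIS-128 specification: 128-bit blocks,
 * a 128-bit key and nonce, a state of five 128-bit blocks, and a tag of
 * 8 to 16 bytes.
 */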
#define AEGIS128_BLOCK_ALIGN 16
#define AEGIS128_BLOCK_SIZE 16
#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

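/*
 * SIMD primitives implemented in aegis128-aesni-asm.S. They operate on the
 * in-memory AEGIS state and must only be called between kernel_fpu_begin()
 * and kernel_fpu_end().
 */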
asmlinkage void crypto_aegis128_aesni_init(void *state, void *key, void *iv);

asmlinkage void crypto_aegis128_aesni_ad(
		void *state, unsigned int length, const void *data);

asmlinkage void crypto_aegis128_aesni_enc(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis128_aesni_dec(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis128_aesni_enc_tail(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis128_aesni_dec_tail(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis128_aesni_final(
		void *state, void *tag_xor, unsigned int assoclen,
		unsigned int cryptlen);

struct aegis_block {
	u8 bytes[AEGIS128_BLOCK_SIZE] __aligned(AEGIS128_BLOCK_ALIGN);
};

struct aegis_state {
	struct aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	struct aegis_block key;
};

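/*
 * Direction-specific hooks: the encrypt and decrypt paths differ only in
 * which skcipher walk initializer and which bulk/tail primitives they use.
 */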
struct aegis_crypt_ops {
	int (*skcipher_walk_init)(struct skcipher_walk *walk,
				  struct aead_request *req, bool atomic);

	void (*crypt_blocks)(void *state, unsigned int length, const void *src,
			     void *dst);
	void (*crypt_tail)(void *state, unsigned int length, const void *src,
			   void *dst);
};

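/*
 * Absorb the associated data into the state. Scatterlist segments are not
 * necessarily block-aligned, so partial blocks are accumulated in a stack
 * buffer and a final zero-padded block is absorbed at the end if needed.
 */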
static void crypto_aegis128_aesni_process_ad(
		struct aegis_state *state, struct scatterlist *sg_src,
		unsigned int assoclen)
{
	struct scatter_walk walk;
	struct aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS128_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS128_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_aesni_ad(state,
							 AEGIS128_BLOCK_SIZE,
							 buf.bytes);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_aesni_ad(state, left, src);

			src += left & ~(AEGIS128_BLOCK_SIZE - 1);
			left &= AEGIS128_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);
		pos += left;
		assoclen -= size;

		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS128_BLOCK_SIZE - pos);
		crypto_aegis128_aesni_ad(state, AEGIS128_BLOCK_SIZE, buf.bytes);
	}
}

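/*
 * Encrypt or decrypt the payload. The skcipher walk yields virtually mapped
 * chunks; full blocks go through ->crypt_blocks() and any remaining partial
 * block through ->crypt_tail().
 */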
static void crypto_aegis128_aesni_process_crypt(
		struct aegis_state *state, struct aead_request *req,
		const struct aegis_crypt_ops *ops)
{
	struct skcipher_walk walk;
	u8 *src, *dst;
	unsigned int chunksize, base;

	ops->skcipher_walk_init(&walk, req, false);

	while (walk.nbytes) {
		src = walk.src.virt.addr;
		dst = walk.dst.virt.addr;
		chunksize = walk.nbytes;

		ops->crypt_blocks(state, chunksize, src, dst);

		base = chunksize & ~(AEGIS128_BLOCK_SIZE - 1);
		src += base;
		dst += base;
		chunksize &= AEGIS128_BLOCK_SIZE - 1;

		if (chunksize > 0)
			ops->crypt_tail(state, chunksize, src, dst);

		skcipher_walk_done(&walk, 0);
	}
}

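/*
 * The crypto API is not asked to align the tfm context (cra_alignmask is 0),
 * so align it by hand to the 16-byte boundary the SIMD code expects;
 * cra_ctxsize reserves the extra space needed for this.
 */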
static struct aegis_ctx *crypto_aegis128_aesni_ctx(struct crypto_aead *aead)
{
	u8 *ctx = crypto_aead_ctx(aead);
	ctx = PTR_ALIGN(ctx, __alignof__(struct aegis_ctx));
	return (void *)ctx;
}

static int crypto_aegis128_aesni_setkey(struct crypto_aead *aead, const u8 *key,
					unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);

	return 0;
}

static int crypto_aegis128_aesni_setauthsize(struct crypto_aead *tfm,
					     unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

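/*
 * Run one complete AEAD operation: initialize the state from key and nonce,
 * absorb the associated data, transform the payload, and compute the tag.
 * The whole sequence runs inside a single kernel_fpu_begin()/end() section
 * because every step uses SSE and AES-NI registers.
 */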
static void crypto_aegis128_aesni_crypt(struct aead_request *req,
					struct aegis_block *tag_xor,
					unsigned int cryptlen,
					const struct aegis_crypt_ops *ops)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(tfm);
	struct aegis_state state;

	kernel_fpu_begin();

	crypto_aegis128_aesni_init(&state, ctx->key.bytes, req->iv);
	crypto_aegis128_aesni_process_ad(&state, req->src, req->assoclen);
	crypto_aegis128_aesni_process_crypt(&state, req, ops);
	crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);

	kernel_fpu_end();
}

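/*
 * Encrypt: the tag is computed into an all-zero block and appended to the
 * destination buffer right after the ciphertext.
 */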
static int crypto_aegis128_aesni_encrypt(struct aead_request *req)
{
	static const struct aegis_crypt_ops OPS = {
		.skcipher_walk_init = skcipher_walk_aead_encrypt,
		.crypt_blocks = crypto_aegis128_aesni_enc,
		.crypt_tail = crypto_aegis128_aesni_enc_tail,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen;

	crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);

	scatterwalk_map_and_copy(tag.bytes, req->dst,
				 req->assoclen + cryptlen, authsize, 1);
	return 0;
}

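/*
 * Decrypt: the expected tag read from the source buffer is passed in as
 * tag_xor, so finalization XORs the computed tag on top of it. The result is
 * all zeros exactly when the tags match, which crypto_memneq() checks in
 * constant time.
 */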
static int crypto_aegis128_aesni_decrypt(struct aead_request *req)
{
	static const struct aegis_block zeros = {};

	static const struct aegis_crypt_ops OPS = {
		.skcipher_walk_init = skcipher_walk_aead_decrypt,
		.crypt_blocks = crypto_aegis128_aesni_dec,
		.crypt_tail = crypto_aegis128_aesni_dec_tail,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;

	scatterwalk_map_and_copy(tag.bytes, req->src,
				 req->assoclen + cryptlen, authsize, 0);

	crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);

	return crypto_memneq(tag.bytes, zeros.bytes, authsize) ? -EBADMSG : 0;
}

static int crypto_aegis128_aesni_init_tfm(struct crypto_aead *aead)
{
	return 0;
}

static void crypto_aegis128_aesni_exit_tfm(struct crypto_aead *aead)
{
}

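/*
 * The handlers below implement the public "aegis128-aesni" algorithm as a
 * wrapper around the internal "__aegis128-aesni" one: requests are handled
 * synchronously when the FPU is usable in the current context and deferred
 * to the cryptd worker otherwise.
 */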
static int cryptd_aegis128_aesni_setkey(struct crypto_aead *aead,
					const u8 *key, unsigned int keylen)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, keylen);
}

static int cryptd_aegis128_aesni_setauthsize(struct crypto_aead *aead,
					     unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}

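/*
 * Use the internal child transform directly when the FPU is usable and
 * either we are not in atomic context or cryptd has no requests queued;
 * otherwise route the request through cryptd.
 */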
static int cryptd_aegis128_aesni_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	aead = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		aead = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, aead);

	return crypto_aead_encrypt(req);
}

static int cryptd_aegis128_aesni_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	aead = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		aead = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, aead);

	return crypto_aead_decrypt(req);
}

static int cryptd_aegis128_aesni_init_tfm(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__aegis128-aesni", CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}

static void cryptd_aegis128_aesni_exit_tfm(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

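/*
 * Two algorithm instances are registered: the internal, synchronous
 * "__aegis128-aesni" transform that does the actual work, and the public,
 * asynchronous "aegis128-aesni" transform that wraps it via cryptd.
 */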
static struct aead_alg crypto_aegis128_aesni_alg[] = {
	{
		.setkey = crypto_aegis128_aesni_setkey,
		.setauthsize = crypto_aegis128_aesni_setauthsize,
		.encrypt = crypto_aegis128_aesni_encrypt,
		.decrypt = crypto_aegis128_aesni_decrypt,
		.init = crypto_aegis128_aesni_init_tfm,
		.exit = crypto_aegis128_aesni_exit_tfm,

		.ivsize = AEGIS128_NONCE_SIZE,
		.maxauthsize = AEGIS128_MAX_AUTH_SIZE,
		.chunksize = AEGIS128_BLOCK_SIZE,

		.base = {
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct aegis_ctx) +
				__alignof__(struct aegis_ctx),
			.cra_alignmask = 0,

			.cra_name = "__aegis128",
			.cra_driver_name = "__aegis128-aesni",

			.cra_module = THIS_MODULE,
		}
	}, {
		.setkey = cryptd_aegis128_aesni_setkey,
		.setauthsize = cryptd_aegis128_aesni_setauthsize,
		.encrypt = cryptd_aegis128_aesni_encrypt,
		.decrypt = cryptd_aegis128_aesni_decrypt,
		.init = cryptd_aegis128_aesni_init_tfm,
		.exit = cryptd_aegis128_aesni_exit_tfm,

		.ivsize = AEGIS128_NONCE_SIZE,
		.maxauthsize = AEGIS128_MAX_AUTH_SIZE,
		.chunksize = AEGIS128_BLOCK_SIZE,

		.base = {
			.cra_flags = CRYPTO_ALG_ASYNC,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct cryptd_aead *),
			.cra_alignmask = 0,

			.cra_priority = 400,

			.cra_name = "aegis128",
			.cra_driver_name = "aegis128-aesni",

			.cra_module = THIS_MODULE,
		}
	}
};

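/*
 * Register the algorithms only on CPUs that provide SSE2 and AES-NI and
 * where the kernel is able to save the SSE register state.
 */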
static int __init crypto_aegis128_aesni_module_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_XMM2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE) ||
	    !cpu_has_xfeatures(XFEATURE_MASK_SSE, NULL))
		return -ENODEV;

	return crypto_register_aeads(crypto_aegis128_aesni_alg,
				     ARRAY_SIZE(crypto_aegis128_aesni_alg));
}

static void __exit crypto_aegis128_aesni_module_exit(void)
{
	crypto_unregister_aeads(crypto_aegis128_aesni_alg,
				ARRAY_SIZE(crypto_aegis128_aesni_alg));
}

module_init(crypto_aegis128_aesni_module_init);
module_exit(crypto_aegis128_aesni_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm -- AESNI+SSE2 implementation");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-aesni");