// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *   Glue for AES-NI + SSE2 implementation
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <asm/fpu/api.h>
#include <asm/cpu_device_id.h>

#define AEGIS128_BLOCK_ALIGN 16
#define AEGIS128_BLOCK_SIZE 16
#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

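/*
 * Routines implemented in aegis128-aesni-asm.S.  They all operate on the
 * 80-byte AEGIS-128 state (five 16-byte blocks) and clobber SSE registers,
 * so every call must happen between kernel_fpu_begin() and
 * kernel_fpu_end().  _ad absorbs associated data one full block at a time
 * (a trailing partial block in 'length' is left for the caller to buffer);
 * _enc/_dec are only ever given whole blocks by the glue code below, and
 * the _tail variants process the final partial block of a message.
 */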
asmlinkage void crypto_aegis128_aesni_init(void *state, void *key, void *iv);

asmlinkage void crypto_aegis128_aesni_ad(
		void *state, unsigned int length, const void *data);

asmlinkage void crypto_aegis128_aesni_enc(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis128_aesni_dec(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis128_aesni_enc_tail(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis128_aesni_dec_tail(
		void *state, unsigned int length, const void *src, void *dst);

/* Note: the assembly takes assoclen before cryptlen; the parameter names
 * here were previously swapped relative to the call site and are fixed to
 * match the actual argument order. */
asmlinkage void crypto_aegis128_aesni_final(
		void *state, void *tag_xor, unsigned int assoclen,
		unsigned int cryptlen);

struct aegis_block {
	u8 bytes[AEGIS128_BLOCK_SIZE] __aligned(AEGIS128_BLOCK_ALIGN);
};

struct aegis_state {
	struct aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	struct aegis_block key;
};

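/*
 * Direction-specific hooks.  Encryption and decryption share the same
 * walk/absorb/finalize skeleton; this table supplies what differs: how
 * the skcipher walk is initialized and which assembly routines handle
 * whole blocks and the trailing partial block.
 */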
struct aegis_crypt_ops {
	int (*skcipher_walk_init)(struct skcipher_walk *walk,
				  struct aead_request *req, bool atomic);

	void (*crypt_blocks)(void *state, unsigned int length, const void *src,
			     void *dst);
	void (*crypt_tail)(void *state, unsigned int length, const void *src,
			   void *dst);
};

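/*
 * Absorb the associated data into the state.  The scatterlist may split
 * the AAD at arbitrary byte boundaries, so bytes that do not fill a whole
 * 16-byte block are staged in 'buf'; crypto_aegis128_aesni_ad() absorbs
 * full blocks only, and the final short block is zero-padded before it
 * is absorbed.
 */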
static void crypto_aegis128_aesni_process_ad(
		struct aegis_state *state, struct scatterlist *sg_src,
		unsigned int assoclen)
{
	struct scatter_walk walk;
	struct aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS128_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS128_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_aesni_ad(state,
							 AEGIS128_BLOCK_SIZE,
							 buf.bytes);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_aesni_ad(state, left, src);

			src += left & ~(AEGIS128_BLOCK_SIZE - 1);
			left &= AEGIS128_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);
		pos += left;
		assoclen -= size;

		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS128_BLOCK_SIZE - pos);
		crypto_aegis128_aesni_ad(state, AEGIS128_BLOCK_SIZE, buf.bytes);
	}
}

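/*
 * Transform the message body.  Each walk step yields a virtually mapped
 * chunk; whole blocks are handed to crypt_blocks() and the remainder is
 * pushed back into the walk, so only the final step can leave a partial
 * block for crypt_tail().
 */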
static void crypto_aegis128_aesni_process_crypt(
		struct aegis_state *state, struct skcipher_walk *walk,
		const struct aegis_crypt_ops *ops)
{
	while (walk->nbytes >= AEGIS128_BLOCK_SIZE) {
		ops->crypt_blocks(state,
				  round_down(walk->nbytes, AEGIS128_BLOCK_SIZE),
				  walk->src.virt.addr, walk->dst.virt.addr);
		skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE);
	}

	if (walk->nbytes) {
		ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr,
				walk->dst.virt.addr);
		skcipher_walk_done(walk, 0);
	}
}

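/*
 * crypto_aead_ctx() honours only cra_alignmask (0 here), not the 16-byte
 * alignment the SSE loads want for the key block, so cra_ctxsize reserves
 * __alignof__(struct aegis_ctx) bytes of slack and the context pointer is
 * rounded up into it by hand.
 */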
static struct aegis_ctx *crypto_aegis128_aesni_ctx(struct crypto_aead *aead)
{
	u8 *ctx = crypto_aead_ctx(aead);

	ctx = PTR_ALIGN(ctx, __alignof__(struct aegis_ctx));
	return (void *)ctx;
}

static int crypto_aegis128_aesni_setkey(struct crypto_aead *aead, const u8 *key,
					unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);

	return 0;
}

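/*
 * AEGIS-128 produces a 128-bit tag; this implementation accepts truncated
 * tags down to 8 bytes and rejects anything outside that range.
 */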
static int crypto_aegis128_aesni_setauthsize(struct crypto_aead *tfm,
					     unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

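/*
 * Common path for both directions: expand key and nonce into the state,
 * absorb the associated data, transform the message body, then mix the
 * AAD and message lengths into the tag.  Everything runs inside a single
 * kernel_fpu_begin()/kernel_fpu_end() section, so the walk is initialized
 * with atomic=true; sleeping is not allowed while the FPU is claimed.
 */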
static void crypto_aegis128_aesni_crypt(struct aead_request *req,
					struct aegis_block *tag_xor,
					unsigned int cryptlen,
					const struct aegis_crypt_ops *ops)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	ops->skcipher_walk_init(&walk, req, true);

	kernel_fpu_begin();

	crypto_aegis128_aesni_init(&state, ctx->key.bytes, req->iv);
	crypto_aegis128_aesni_process_ad(&state, req->src, req->assoclen);
	crypto_aegis128_aesni_process_crypt(&state, &walk, ops);
	crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);

	kernel_fpu_end();
}

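/*
 * Encrypt: the tag is accumulated into a zeroed block, so the XOR in the
 * finalization step leaves the plain tag, which is then stored after the
 * ciphertext in the destination scatterlist.
 */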
static int crypto_aegis128_aesni_encrypt(struct aead_request *req)
{
	static const struct aegis_crypt_ops OPS = {
		.skcipher_walk_init = skcipher_walk_aead_encrypt,
		.crypt_blocks = crypto_aegis128_aesni_enc,
		.crypt_tail = crypto_aegis128_aesni_enc_tail,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen;

	crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);

	scatterwalk_map_and_copy(tag.bytes, req->dst,
				 req->assoclen + cryptlen, authsize, 1);
	return 0;
}

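/*
 * Decrypt: req->cryptlen covers ciphertext plus tag, so the tag is split
 * off first and XORed into the computed tag during finalization; a match
 * leaves all-zero bytes.  crypto_memneq() compares in constant time so
 * the number of matching tag bytes is not leaked.
 */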
static int crypto_aegis128_aesni_decrypt(struct aead_request *req)
{
	static const struct aegis_block zeros = {};

	static const struct aegis_crypt_ops OPS = {
		.skcipher_walk_init = skcipher_walk_aead_decrypt,
		.crypt_blocks = crypto_aegis128_aesni_dec,
		.crypt_tail = crypto_aegis128_aesni_dec_tail,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;

	scatterwalk_map_and_copy(tag.bytes, req->src,
				 req->assoclen + cryptlen, authsize, 0);

	crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);

	return crypto_memneq(tag.bytes, zeros.bytes, authsize) ? -EBADMSG : 0;
}

static int crypto_aegis128_aesni_init_tfm(struct crypto_aead *aead)
{
	return 0;
}

static void crypto_aegis128_aesni_exit_tfm(struct crypto_aead *aead)
{
}

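/*
 * The bare algorithm is marked CRYPTO_ALG_INTERNAL: it touches SSE
 * registers, so it must not be selected by general users directly;
 * access goes through the SIMD wrapper registered in module init.
 */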
static struct aead_alg crypto_aegis128_aesni_alg = {
	.setkey = crypto_aegis128_aesni_setkey,
	.setauthsize = crypto_aegis128_aesni_setauthsize,
	.encrypt = crypto_aegis128_aesni_encrypt,
	.decrypt = crypto_aegis128_aesni_decrypt,
	.init = crypto_aegis128_aesni_init_tfm,
	.exit = crypto_aegis128_aesni_exit_tfm,

	.ivsize = AEGIS128_NONCE_SIZE,
	.maxauthsize = AEGIS128_MAX_AUTH_SIZE,
	.chunksize = AEGIS128_BLOCK_SIZE,

	.base = {
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aegis_ctx) +
			       __alignof__(struct aegis_ctx),
		.cra_alignmask = 0,
		.cra_priority = 400,

		.cra_name = "__aegis128",
		.cra_driver_name = "__aegis128-aesni",

		.cra_module = THIS_MODULE,
	}
};

static struct simd_aead_alg *simd_alg;

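/*
 * The assembly requires AES-NI and SSE2, and the kernel must be saving
 * SSE state on context switches; bail out with -ENODEV otherwise.
 * simd_register_aeads_compat() also registers the visible
 * "aegis128-aesni" SIMD wrapper, which forwards to cryptd whenever the
 * FPU cannot be used directly in the caller's context.
 */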
static int __init crypto_aegis128_aesni_module_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_XMM2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !cpu_has_xfeatures(XFEATURE_MASK_SSE, NULL))
		return -ENODEV;

	return simd_register_aeads_compat(&crypto_aegis128_aesni_alg, 1,
					  &simd_alg);
}

static void __exit crypto_aegis128_aesni_module_exit(void)
{
	simd_unregister_aeads(&crypto_aegis128_aesni_alg, 1, &simd_alg);
}

module_init(crypto_aegis128_aesni_module_init);
module_exit(crypto_aegis128_aesni_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm -- AESNI+SSE2 implementation");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-aesni");
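
/*
 * Usage sketch (illustrative only, not part of the driver): consumers
 * reach this implementation through the generic AEAD API under the
 * "aegis128" name exposed by the SIMD wrapper.  The variable names below
 * are hypothetical and error handling is omitted:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis128", 0, 0);
 *	struct aead_request *req;
 *	int err;
 *
 *	crypto_aead_setkey(tfm, key, AEGIS128_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS128_MAX_AUTH_SIZE);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, 0, NULL, NULL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	err = crypto_aead_encrypt(req);
 */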