/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

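/*
 * The key is not aligned to the algorithm's alignment mask: copy it into
 * a temporary aligned buffer before handing it to ->setkey(), then wipe
 * and free the buffer.
 */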
static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct aead_alg *aead = crypto_aead_alg(tfm);
        unsigned long alignmask = crypto_aead_alignmask(tfm);
        int ret;
        u8 *buffer, *alignbuffer;
        unsigned long absize;

        absize = keylen + alignmask;
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = aead->setkey(tfm, alignbuffer, keylen);
        memset(alignbuffer, 0, keylen);
        kfree(buffer);
        return ret;
}

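/* Call ->setkey() directly or via the aligned bounce buffer above. */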
static int setkey(struct crypto_aead *tfm, const u8 *key, unsigned int keylen)
{
        struct aead_alg *aead = crypto_aead_alg(tfm);
        unsigned long alignmask = crypto_aead_alignmask(tfm);

        if ((unsigned long)key & alignmask)
                return setkey_unaligned(tfm, key, keylen);

        return aead->setkey(tfm, key, keylen);
}

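/*
 * Set the authentication tag size, bounded by the algorithm's maxauthsize.
 * The new size is recorded on this transform and on its ->base transform
 * (the inner AEAD for geniv wrappers, the transform itself otherwise).
 */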
int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
        struct aead_tfm *crt = crypto_aead_crt(tfm);
        int err;

        if (authsize > crypto_aead_alg(tfm)->maxauthsize)
                return -EINVAL;

        if (crypto_aead_alg(tfm)->setauthsize) {
                err = crypto_aead_alg(tfm)->setauthsize(crt->base, authsize);
                if (err)
                        return err;
        }

        crypto_aead_crt(crt->base)->authsize = authsize;
        crt->authsize = authsize;
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);

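/* AEAD transforms need no extra context beyond the algorithm's cra_ctxsize. */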
static unsigned int crypto_aead_ctxsize(struct crypto_alg *alg, u32 type,
                                        u32 mask)
{
        return alg->cra_ctxsize;
}

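/* Fallback for algorithms that provide no IV generation hooks. */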
static int no_givcrypt(struct aead_givcrypt_request *req)
{
        return -ENOSYS;
}

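/*
 * Set up the aead_tfm operations for a full AEAD algorithm.  GENIV
 * instances use the algorithm's own ->setkey() directly; everything else
 * goes through the alignment-checking setkey() wrapper.  Missing
 * givencrypt/givdecrypt hooks fall back to no_givcrypt().
 */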
static int crypto_init_aead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        struct aead_alg *alg = &tfm->__crt_alg->cra_aead;
        struct aead_tfm *crt = &tfm->crt_aead;

        if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
                return -EINVAL;

        crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ?
                      alg->setkey : setkey;
        crt->encrypt = alg->encrypt;
        crt->decrypt = alg->decrypt;
        crt->givencrypt = alg->givencrypt ?: no_givcrypt;
        crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
        crt->base = __crypto_aead_cast(tfm);
        crt->ivsize = alg->ivsize;
        crt->authsize = alg->maxauthsize;

        return 0;
}

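/* Report an AEAD algorithm's parameters in /proc/crypto. */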
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct aead_alg *aead = &alg->cra_aead;

        seq_printf(m, "type         : aead\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "ivsize       : %u\n", aead->ivsize);
        seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
        seq_printf(m, "geniv        : %s\n", aead->geniv ?: "<built-in>");
}

const struct crypto_type crypto_aead_type = {
        .ctxsize = crypto_aead_ctxsize,
        .init = crypto_init_aead_ops,
#ifdef CONFIG_PROC_FS
        .show = crypto_aead_show,
#endif
};
EXPORT_SYMBOL_GPL(crypto_aead_type);

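/*
 * IV-less algorithms have nothing to generate, so their giv entry points
 * simply forward to the plain encrypt/decrypt operations.
 */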
static int aead_null_givencrypt(struct aead_givcrypt_request *req)
{
        return crypto_aead_encrypt(&req->areq);
}

static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
{
        return crypto_aead_decrypt(&req->areq);
}

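/*
 * Set up the aead_tfm operations for an AEAD algorithm that relies on an
 * external IV generator (nivaead).
 */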
static int crypto_init_nivaead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        struct aead_alg *alg = &tfm->__crt_alg->cra_aead;
        struct aead_tfm *crt = &tfm->crt_aead;

        if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
                return -EINVAL;

        crt->setkey = setkey;
        crt->encrypt = alg->encrypt;
        crt->decrypt = alg->decrypt;
        if (!alg->ivsize) {
                crt->givencrypt = aead_null_givencrypt;
                crt->givdecrypt = aead_null_givdecrypt;
        }
        crt->base = __crypto_aead_cast(tfm);
        crt->ivsize = alg->ivsize;
        crt->authsize = alg->maxauthsize;

        return 0;
}

static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct aead_alg *aead = &alg->cra_aead;

        seq_printf(m, "type         : nivaead\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "ivsize       : %u\n", aead->ivsize);
        seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
        seq_printf(m, "geniv        : %s\n", aead->geniv);
}

const struct crypto_type crypto_nivaead_type = {
        .ctxsize = crypto_aead_ctxsize,
        .init = crypto_init_nivaead_ops,
#ifdef CONFIG_PROC_FS
        .show = crypto_nivaead_show,
#endif
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);

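/*
 * Look up and grab a reference to the underlying nivaead algorithm (an
 * AEAD without the GENIV flag) on behalf of a geniv template instance.
 */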
static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
                               const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        int err;

        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        type |= CRYPTO_ALG_TYPE_AEAD;
        mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV;

        alg = crypto_alg_mod_lookup(name, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
        crypto_mod_put(alg);
        return err;
}

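/*
 * Allocate a geniv template instance wrapping the nivaead algorithm named
 * by the template parameters.  When building an algorithm's default IV
 * generator (CRYPTO_ALG_GENIV set in the request mask) the instance
 * inherits the underlying algorithm's name unchanged.
 */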
struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
                                         struct rtattr **tb, u32 type,
                                         u32 mask)
{
        const char *name;
        struct crypto_aead_spawn *spawn;
        struct crypto_attr_type *algt;
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        int err;

        algt = crypto_get_attr_type(tb);
        err = PTR_ERR(algt);
        if (IS_ERR(algt))
                return ERR_PTR(err);

        if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
            algt->mask)
                return ERR_PTR(-EINVAL);

        name = crypto_attr_alg_name(tb[1]);
        err = PTR_ERR(name);
        if (IS_ERR(name))
                return ERR_PTR(err);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return ERR_PTR(-ENOMEM);

        spawn = crypto_instance_ctx(inst);

        /* Ignore async algorithms if necessary. */
        mask |= crypto_requires_sync(algt->type, algt->mask);

        crypto_set_aead_spawn(spawn, inst);
        err = crypto_grab_nivaead(spawn, name, type, mask);
        if (err)
                goto err_free_inst;

        alg = crypto_aead_spawn_alg(spawn);

        err = -EINVAL;
        if (!alg->cra_aead.ivsize)
                goto err_drop_alg;

        /*
         * This is only true if we're constructing an algorithm with its
         * default IV generator.  For the default generator we elide the
         * template name and double-check the IV generator.
         */
        if (algt->mask & CRYPTO_ALG_GENIV) {
                if (strcmp(tmpl->name, alg->cra_aead.geniv))
                        goto err_drop_alg;

                memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
                memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
                       CRYPTO_MAX_ALG_NAME);
        } else {
                err = -ENAMETOOLONG;
                if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
                             "%s(%s)", tmpl->name, alg->cra_name) >=
                    CRYPTO_MAX_ALG_NAME)
                        goto err_drop_alg;
                if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                             "%s(%s)", tmpl->name, alg->cra_driver_name) >=
                    CRYPTO_MAX_ALG_NAME)
                        goto err_drop_alg;
        }

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV;
        inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.cra_priority = alg->cra_priority;
        inst->alg.cra_blocksize = alg->cra_blocksize;
        inst->alg.cra_alignmask = alg->cra_alignmask;
        inst->alg.cra_type = &crypto_aead_type;

        inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
        inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
        inst->alg.cra_aead.geniv = alg->cra_aead.geniv;

        inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
        inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
        inst->alg.cra_aead.encrypt = alg->cra_aead.encrypt;
        inst->alg.cra_aead.decrypt = alg->cra_aead.decrypt;

out:
        return inst;

err_drop_alg:
        crypto_drop_aead(spawn);
err_free_inst:
        kfree(inst);
        inst = ERR_PTR(err);
        goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);

void aead_geniv_free(struct crypto_instance *inst)
{
        crypto_drop_aead(crypto_instance_ctx(inst));
        kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);

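/*
 * Instantiate the underlying AEAD transform for a geniv instance and add
 * its request size to our own.
 */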
int aead_geniv_init(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_aead *aead;

        aead = crypto_spawn_aead(crypto_instance_ctx(inst));
        if (IS_ERR(aead))
                return PTR_ERR(aead);

        tfm->crt_aead.base = aead;
        tfm->crt_aead.reqsize += crypto_aead_reqsize(aead);

        return 0;
}
EXPORT_SYMBOL_GPL(aead_geniv_init);

void aead_geniv_exit(struct crypto_tfm *tfm)
{
        crypto_free_aead(tfm->crt_aead.base);
}
EXPORT_SYMBOL_GPL(aead_geniv_exit);

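/*
 * Construct the default geniv wrapper for a bare nivaead algorithm by
 * instantiating its preferred IV-generator template, then return -EAGAIN
 * so that the caller repeats the lookup and finds the newly registered
 * instance.
 */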
static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
{
        struct rtattr *tb[3];
        struct {
                struct rtattr attr;
                struct crypto_attr_type data;
        } ptype;
        struct {
                struct rtattr attr;
                struct crypto_attr_alg data;
        } palg;
        struct crypto_template *tmpl;
        struct crypto_instance *inst;
        struct crypto_alg *larval;
        const char *geniv;
        int err;

        larval = crypto_larval_lookup(alg->cra_driver_name,
                                      CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
                                      CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        err = PTR_ERR(larval);
        if (IS_ERR(larval))
                goto out;

        err = -EAGAIN;
        if (!crypto_is_larval(larval))
                goto drop_larval;

        ptype.attr.rta_len = sizeof(ptype);
        ptype.attr.rta_type = CRYPTOA_TYPE;
        ptype.data.type = type | CRYPTO_ALG_GENIV;
        /* GENIV tells the template that we're making a default geniv. */
        ptype.data.mask = mask | CRYPTO_ALG_GENIV;
        tb[0] = &ptype.attr;

        palg.attr.rta_len = sizeof(palg);
        palg.attr.rta_type = CRYPTOA_ALG;
        /* Must use the exact name to locate ourselves. */
        memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
        tb[1] = &palg.attr;

        tb[2] = NULL;

        geniv = alg->cra_aead.geniv;

        tmpl = crypto_lookup_template(geniv);
        err = -ENOENT;
        if (!tmpl)
                goto kill_larval;

        inst = tmpl->alloc(tb);
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto put_tmpl;

        if ((err = crypto_register_instance(tmpl, inst))) {
                tmpl->free(inst);
                goto put_tmpl;
        }

        /* Redo the lookup to use the instance we just registered. */
        err = -EAGAIN;

put_tmpl:
        crypto_tmpl_put(tmpl);
kill_larval:
        crypto_larval_kill(larval);
drop_larval:
        crypto_mod_put(larval);
out:
        crypto_mod_put(alg);
        return err;
}

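/*
 * Look up an AEAD algorithm by name.  New-style algorithms
 * (crypto_aead_type) and algorithms that take no IV are returned as-is;
 * anything else still needs an IV generator, so its default geniv
 * instance is built via crypto_nivaead_default().
 */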
static struct crypto_alg *crypto_lookup_aead(const char *name, u32 type,
                                             u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_alg_mod_lookup(name, type, mask);
        if (IS_ERR(alg))
                return alg;

        if (alg->cra_type == &crypto_aead_type)
                return alg;

        if (!alg->cra_aead.ivsize)
                return alg;

        crypto_mod_put(alg);
        alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
                                    mask & ~CRYPTO_ALG_TESTED);
        if (IS_ERR(alg))
                return alg;

        if (alg->cra_type == &crypto_aead_type) {
                if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) {
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ENOENT);
                }
                return alg;
        }

        BUG_ON(!alg->cra_aead.ivsize);

        return ERR_PTR(crypto_nivaead_default(alg, type, mask));
}

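/* Resolve an AEAD algorithm and grab a reference for a template spawn. */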
int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
                     u32 type, u32 mask)
{
        struct crypto_alg *alg;
        int err;

        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        type |= CRYPTO_ALG_TYPE_AEAD;
        mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        mask |= CRYPTO_ALG_TYPE_MASK;

        alg = crypto_lookup_aead(name, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
        crypto_mod_put(alg);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);

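/*
 * Allocate an AEAD transform by name.  A lookup that triggers default
 * geniv construction returns -EAGAIN, in which case the loop retries
 * until the final instance is available or a signal is pending.
 */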
struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        type |= CRYPTO_ALG_TYPE_AEAD;
        mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        mask |= CRYPTO_ALG_TYPE_MASK;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_lookup_aead(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return __crypto_aead_cast(tfm);

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");