xref: /linux/crypto/aead.c (revision f7ead7b47a758bbee6fdc66f95f27fdb866e5e9d)
1 /*
2  * AEAD: Authenticated Encryption with Associated Data
3  *
4  * This file provides API support for AEAD algorithms.
5  *
6  * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
7  *
8  * This program is free software; you can redistribute it and/or modify it
9  * under the terms of the GNU General Public License as published by the Free
10  * Software Foundation; either version 2 of the License, or (at your option)
11  * any later version.
12  *
13  */
14 
15 #include <crypto/internal/aead.h>
16 #include <linux/err.h>
17 #include <linux/init.h>
18 #include <linux/kernel.h>
19 #include <linux/module.h>
20 #include <linux/rtnetlink.h>
21 #include <linux/sched.h>
22 #include <linux/slab.h>
23 #include <linux/seq_file.h>
24 #include <linux/cryptouser.h>
25 #include <net/netlink.h>
26 
27 #include "internal.h"
28 
29 static int aead_null_givencrypt(struct aead_givcrypt_request *req);
30 static int aead_null_givdecrypt(struct aead_givcrypt_request *req);
31 
32 static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
33 			    unsigned int keylen)
34 {
35 	struct aead_alg *aead = crypto_aead_alg(tfm);
36 	unsigned long alignmask = crypto_aead_alignmask(tfm);
37 	int ret;
38 	u8 *buffer, *alignbuffer;
39 	unsigned long absize;
40 
41 	absize = keylen + alignmask;
42 	buffer = kmalloc(absize, GFP_ATOMIC);
43 	if (!buffer)
44 		return -ENOMEM;
45 
46 	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
47 	memcpy(alignbuffer, key, keylen);
48 	ret = aead->setkey(tfm, alignbuffer, keylen);
49 	memset(alignbuffer, 0, keylen);
50 	kfree(buffer);
51 	return ret;
52 }
53 
54 int crypto_aead_setkey(struct crypto_aead *tfm,
55 		       const u8 *key, unsigned int keylen)
56 {
57 	struct aead_alg *aead = crypto_aead_alg(tfm);
58 	unsigned long alignmask = crypto_aead_alignmask(tfm);
59 
60 	tfm = tfm->child;
61 
62 	if ((unsigned long)key & alignmask)
63 		return setkey_unaligned(tfm, key, keylen);
64 
65 	return aead->setkey(tfm, key, keylen);
66 }
67 EXPORT_SYMBOL_GPL(crypto_aead_setkey);
68 
69 int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
70 {
71 	int err;
72 
73 	if (authsize > crypto_aead_alg(tfm)->maxauthsize)
74 		return -EINVAL;
75 
76 	if (crypto_aead_alg(tfm)->setauthsize) {
77 		err = crypto_aead_alg(tfm)->setauthsize(tfm->child, authsize);
78 		if (err)
79 			return err;
80 	}
81 
82 	tfm->child->authsize = authsize;
83 	tfm->authsize = authsize;
84 	return 0;
85 }
86 EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);
87 
/* Stub for algorithms that require IV generation but provide no
 * givencrypt/givdecrypt: always fails with -ENOSYS. */
static int no_givcrypt(struct aead_givcrypt_request *req)
{
	return -ENOSYS;
}
92 
/*
 * Per-tfm initializer shared by crypto_aead_type and
 * crypto_nivaead_type: copy the algorithm's ops into the transform and
 * pick suitable IV-generation hooks.
 */
static int crypto_aead_init_tfm(struct crypto_tfm *tfm)
{
	struct aead_alg *alg = &tfm->__crt_alg->cra_aead;
	struct crypto_aead *crt = __crypto_aead_cast(tfm);

	/* Reject oversized tag/IV sizes; the PAGE_SIZE / 8 cap
	 * presumably bounds callers' temporary buffers — TODO confirm. */
	if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
		return -EINVAL;

	crt->encrypt = alg->encrypt;
	crt->decrypt = alg->decrypt;
	if (alg->ivsize) {
		/* IV needed: use the algorithm's geniv hooks, or fail
		 * with -ENOSYS via no_givcrypt if it has none. */
		crt->givencrypt = alg->givencrypt ?: no_givcrypt;
		crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
	} else {
		/* No IV at all: giv ops degrade to plain encrypt/decrypt
		 * on the embedded request. */
		crt->givencrypt = aead_null_givencrypt;
		crt->givdecrypt = aead_null_givdecrypt;
	}
	/* A bare (non-geniv-wrapped) transform is its own child. */
	crt->child = __crypto_aead_cast(tfm);
	crt->ivsize = alg->ivsize;
	crt->authsize = alg->maxauthsize;

	return 0;
}
116 
117 #ifdef CONFIG_NET
118 static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
119 {
120 	struct crypto_report_aead raead;
121 	struct aead_alg *aead = &alg->cra_aead;
122 
123 	strncpy(raead.type, "aead", sizeof(raead.type));
124 	strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));
125 
126 	raead.blocksize = alg->cra_blocksize;
127 	raead.maxauthsize = aead->maxauthsize;
128 	raead.ivsize = aead->ivsize;
129 
130 	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
131 		    sizeof(struct crypto_report_aead), &raead))
132 		goto nla_put_failure;
133 	return 0;
134 
135 nla_put_failure:
136 	return -EMSGSIZE;
137 }
138 #else
139 static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
140 {
141 	return -ENOSYS;
142 }
143 #endif
144 
145 static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
146 	__attribute__ ((unused));
147 static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
148 {
149 	struct aead_alg *aead = &alg->cra_aead;
150 
151 	seq_printf(m, "type         : aead\n");
152 	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
153 					     "yes" : "no");
154 	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
155 	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
156 	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
157 	seq_printf(m, "geniv        : %s\n", aead->geniv ?: "<built-in>");
158 }
159 
/* Frontend type for full AEAD algorithms (built-in or wrapped geniv). */
const struct crypto_type crypto_aead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_aead_show,
#endif
	.report = crypto_aead_report,
	.lookup = crypto_lookup_aead,
	/* Lookups through this type never match on the GENIV bit. */
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_aead_type);
174 
/* givencrypt for IV-less algorithms: plain encrypt of the embedded
 * request, no IV generation performed. */
static int aead_null_givencrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_encrypt(&req->areq);
}
179 
/* givdecrypt counterpart of aead_null_givencrypt(). */
static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
{
	return crypto_aead_decrypt(&req->areq);
}
184 
185 #ifdef CONFIG_NET
186 static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
187 {
188 	struct crypto_report_aead raead;
189 	struct aead_alg *aead = &alg->cra_aead;
190 
191 	strncpy(raead.type, "nivaead", sizeof(raead.type));
192 	strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));
193 
194 	raead.blocksize = alg->cra_blocksize;
195 	raead.maxauthsize = aead->maxauthsize;
196 	raead.ivsize = aead->ivsize;
197 
198 	if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
199 		    sizeof(struct crypto_report_aead), &raead))
200 		goto nla_put_failure;
201 	return 0;
202 
203 nla_put_failure:
204 	return -EMSGSIZE;
205 }
206 #else
207 static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
208 {
209 	return -ENOSYS;
210 }
211 #endif
212 
213 
214 static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
215 	__attribute__ ((unused));
216 static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
217 {
218 	struct aead_alg *aead = &alg->cra_aead;
219 
220 	seq_printf(m, "type         : nivaead\n");
221 	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
222 					     "yes" : "no");
223 	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
224 	seq_printf(m, "ivsize       : %u\n", aead->ivsize);
225 	seq_printf(m, "maxauthsize  : %u\n", aead->maxauthsize);
226 	seq_printf(m, "geniv        : %s\n", aead->geniv);
227 }
228 
/* Frontend type for AEAD algorithms that lack a built-in IV generator;
 * matched only when the lookup explicitly sets CRYPTO_ALG_GENIV. */
const struct crypto_type crypto_nivaead_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_aead_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_nivaead_show,
#endif
	.report = crypto_nivaead_report,
	.maskclear = ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV),
	.maskset = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV,
	.type = CRYPTO_ALG_TYPE_AEAD,
	.tfmsize = offsetof(struct crypto_aead, base),
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);
242 
/* Grab a nivaead algorithm into @spawn, using the nivaead frontend so
 * the GENIV mask bits are honored during lookup. */
static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
			       const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_nivaead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}
249 
/*
 * Allocate a geniv template instance wrapping a nivaead algorithm.
 *
 * Parses the template attributes in @tb, grabs the underlying nivaead
 * algorithm named by tb[1], and builds a crypto_instance whose cra_aead
 * ops forward to the underlying algorithm.  Returns the instance or an
 * ERR_PTR; on error all intermediate resources are released via the
 * goto cleanup chain.
 */
struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
					 struct rtattr **tb, u32 type,
					 u32 mask)
{
	const char *name;
	struct crypto_aead_spawn *spawn;
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	/* Caller must be asking for an AEAD (optionally GENIV) type. */
	if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
	    algt->mask)
		return ERR_PTR(-EINVAL);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return ERR_CAST(name);

	/* Instance and its spawn context are allocated as one block. */
	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);

	/* Ignore async algorithms if necessary. */
	mask |= crypto_requires_sync(algt->type, algt->mask);

	crypto_set_aead_spawn(spawn, inst);
	err = crypto_grab_nivaead(spawn, name, type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_aead_spawn_alg(spawn);

	/* A geniv wrapper is meaningless over an IV-less algorithm. */
	err = -EINVAL;
	if (!alg->cra_aead.ivsize)
		goto err_drop_alg;

	/*
	 * This is only true if we're constructing an algorithm with its
	 * default IV generator.  For the default generator we elide the
	 * template name and double-check the IV generator.
	 */
	if (algt->mask & CRYPTO_ALG_GENIV) {
		if (strcmp(tmpl->name, alg->cra_aead.geniv))
			goto err_drop_alg;

		memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
		memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
		       CRYPTO_MAX_ALG_NAME);
	} else {
		/* Explicit use: name the instance "template(inner)". */
		err = -ENAMETOOLONG;
		if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, alg->cra_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_drop_alg;
		if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, alg->cra_driver_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_drop_alg;
	}

	/* Inherit the inner algorithm's properties and ops. */
	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_aead_type;

	inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
	inst->alg.cra_aead.geniv = alg->cra_aead.geniv;

	inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
	inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
	inst->alg.cra_aead.encrypt = alg->cra_aead.encrypt;
	inst->alg.cra_aead.decrypt = alg->cra_aead.decrypt;

out:
	return inst;

err_drop_alg:
	crypto_drop_aead(spawn);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(aead_geniv_alloc);
344 
/* Release a geniv instance created by aead_geniv_alloc(): drop the
 * inner-algorithm spawn, then free the instance itself. */
void aead_geniv_free(struct crypto_instance *inst)
{
	struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);

	crypto_drop_aead(spawn);
	kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);
351 
352 int aead_geniv_init(struct crypto_tfm *tfm)
353 {
354 	struct crypto_instance *inst = (void *)tfm->__crt_alg;
355 	struct crypto_aead *child;
356 	struct crypto_aead *aead;
357 
358 	aead = __crypto_aead_cast(tfm);
359 
360 	child = crypto_spawn_aead(crypto_instance_ctx(inst));
361 	if (IS_ERR(child))
362 		return PTR_ERR(child);
363 
364 	aead->child = child;
365 	aead->reqsize += crypto_aead_reqsize(child);
366 
367 	return 0;
368 }
369 EXPORT_SYMBOL_GPL(aead_geniv_init);
370 
371 void aead_geniv_exit(struct crypto_tfm *tfm)
372 {
373 	crypto_free_aead(__crypto_aead_cast(tfm)->child);
374 }
375 EXPORT_SYMBOL_GPL(aead_geniv_exit);
376 
/*
 * Instantiate the default IV generator for a nivaead algorithm @alg.
 *
 * Builds a synthetic attribute list naming @alg and its declared geniv
 * template, registers the resulting template instance, and returns
 * -EAGAIN so the caller redoes the lookup and finds the newly
 * registered instance.  Consumes the reference on @alg.
 */
static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
{
	struct rtattr *tb[3];
	struct {
		struct rtattr attr;
		struct crypto_attr_type data;
	} ptype;
	struct {
		struct rtattr attr;
		struct crypto_attr_alg data;
	} palg;
	struct crypto_template *tmpl;
	struct crypto_instance *inst;
	struct crypto_alg *larval;
	const char *geniv;
	int err;

	/* Create a larval as a placeholder for the geniv-wrapped name
	 * while we construct the real instance. */
	larval = crypto_larval_lookup(alg->cra_driver_name,
				      CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
				      CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto out;

	/* Someone else registered it first; retry the lookup. */
	err = -EAGAIN;
	if (!crypto_is_larval(larval))
		goto drop_larval;

	ptype.attr.rta_len = sizeof(ptype);
	ptype.attr.rta_type = CRYPTOA_TYPE;
	ptype.data.type = type | CRYPTO_ALG_GENIV;
	/* GENIV tells the template that we're making a default geniv. */
	ptype.data.mask = mask | CRYPTO_ALG_GENIV;
	tb[0] = &ptype.attr;

	palg.attr.rta_len = sizeof(palg);
	palg.attr.rta_type = CRYPTOA_ALG;
	/* Must use the exact name to locate ourselves. */
	memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
	tb[1] = &palg.attr;

	tb[2] = NULL;

	geniv = alg->cra_aead.geniv;

	tmpl = crypto_lookup_template(geniv);
	err = -ENOENT;
	if (!tmpl)
		goto kill_larval;

	inst = tmpl->alloc(tb);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto put_tmpl;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		tmpl->free(inst);
		goto put_tmpl;
	}

	/* Redo the lookup to use the instance we just registered. */
	err = -EAGAIN;

put_tmpl:
	crypto_tmpl_put(tmpl);
kill_larval:
	crypto_larval_kill(larval);
drop_larval:
	crypto_mod_put(larval);
out:
	crypto_mod_put(alg);
	return err;
}
451 
/*
 * Look up an AEAD algorithm by @name.  Full AEADs (crypto_aead_type)
 * and IV-less legacy algorithms are returned directly; a nivaead with
 * an IV is wrapped in its default IV generator first, in which case
 * the caller receives an ERR_PTR (typically -EAGAIN) prompting a
 * retried lookup.
 */
struct crypto_alg *crypto_lookup_aead(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_alg_mod_lookup(name, type, mask);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type)
		return alg;

	/* Legacy algorithm without an IV: usable as-is. */
	if (!alg->cra_aead.ivsize)
		return alg;

	/* Retry the lookup insisting the algorithm be tested, so the
	 * default geniv is built over a vetted implementation. */
	crypto_mod_put(alg);
	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
				    mask & ~CRYPTO_ALG_TESTED);
	if (IS_ERR(alg))
		return alg;

	if (alg->cra_type == &crypto_aead_type) {
		/* Reject if the TESTED bit does not satisfy the
		 * caller's original type/mask request. */
		if (~alg->cra_flags & (type ^ ~mask) & CRYPTO_ALG_TESTED) {
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
		return alg;
	}

	BUG_ON(!alg->cra_aead.ivsize);

	/* nivaead with an IV: construct its default IV generator. */
	return ERR_PTR(crypto_nivaead_default(alg, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_lookup_aead);
485 
/* Grab a full AEAD algorithm into @spawn via the standard AEAD
 * frontend; counterpart of the static crypto_grab_nivaead(). */
int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_aead_type;
	return crypto_grab_spawn(&spawn->base, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_aead);
493 
/* Allocate an AEAD transform for @alg_name; returns an ERR_PTR on
 * failure.  Thin wrapper over crypto_alloc_tfm() with the AEAD type. */
struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_aead_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_aead);
499 
500 MODULE_LICENSE("GPL");
501 MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");
502