/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */
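
/*
 * Three templates are registered below: "gcm" (e.g. gcm(aes), built on
 * top of ctr(aes)), "gcm_base" (which takes the CTR mode driver name
 * explicitly) and the "rfc4106" nonce-carrying wrapper used by IPsec ESP.
 *
 * A minimal usage sketch (error handling omitted):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *	...
 *	crypto_free_aead(tfm);
 */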

#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
};

struct crypto_gcm_ctx {
	struct crypto_ablkcipher *ctr;
	struct gf128mul_4k *gf128;
};

struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};

struct crypto_gcm_ghash_ctx {
	u32 bytes;
	u32 flags;
	struct gf128mul_4k *gf128;
	u8 buffer[16];
};

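/*
 * Per-request context, aligned to the AEAD's alignment mask.  The CTR
 * pass writes E(K, Y0) into auth_tag (see crypto_gcm_init_crypt());
 * iauth_tag receives the tag that arrived with the ciphertext on
 * decryption.  src/dst chain a 16-byte block in front of the caller's
 * scatterlists for that purpose.
 */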
struct crypto_gcm_req_priv_ctx {
	u8 auth_tag[16];
	u8 iauth_tag[16];
	struct scatterlist src[2];
	struct scatterlist dst[2];
	struct crypto_gcm_ghash_ctx ghash;
	struct ablkcipher_request abreq;
};

struct crypto_gcm_setkey_result {
	int err;
	struct completion completion;
};

static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
				  struct gf128mul_4k *gf128)
{
	ctx->bytes = 0;
	ctx->flags = flags;
	ctx->gf128 = gf128;
	memset(ctx->buffer, 0, 16);
}

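/*
 * Absorb srclen bytes into the GHASH state: for each complete 16-byte
 * block X, compute Y = (Y xor X) * H in GF(2^128).  A trailing partial
 * block is XORed into the buffer and completed on a later call (or by
 * crypto_gcm_ghash_flush()); ctx->bytes counts the space still free in
 * the buffered block.
 */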
static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
				    const u8 *src, unsigned int srclen)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		int n = min(srclen, ctx->bytes);
		u8 *pos = dst + (16 - ctx->bytes);

		ctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!ctx->bytes)
			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	while (srclen >= 16) {
		crypto_xor(dst, src, 16);
		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
		src += 16;
		srclen -= 16;
	}

	if (srclen) {
		ctx->bytes = 16 - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}
}

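/*
 * Feed a scatterlist into GHASH, mapping one contiguous chunk at a
 * time and yielding between chunks when the transform may sleep.
 */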
static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
				       struct scatterlist *sg, int len)
{
	struct scatter_walk walk;
	u8 *src;
	int n;

	if (!len)
		return;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);

		if (!n) {
			scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		src = scatterwalk_map(&walk, 0);

		crypto_gcm_ghash_update(ctx, src, n);
		len -= n;

		scatterwalk_unmap(src, 0);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		if (len)
			crypto_yield(ctx->flags);
	}
}

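/*
 * Complete a pending partial block.  GHASH pads the final block with
 * zeroes, and XOR with zero is a no-op, so only the multiplication by
 * H remains to be done.
 */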
static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
{
	u8 *dst = ctx->buffer;

	if (ctx->bytes) {
		u8 *tmp = dst + (16 - ctx->bytes);

		while (ctx->bytes--)
			*tmp++ ^= 0;

		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
	}

	ctx->bytes = 0;
}

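/*
 * Finish GHASH with the mandatory length block len(A) || len(C) (in
 * bits) and XOR the result into dst, which already holds E(K, Y0), to
 * form the authentication tag.
 */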
static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
				       unsigned int authlen,
				       unsigned int cryptlen, u8 *dst)
{
	u8 *buf = ctx->buffer;
	u128 lengths;

	lengths.a = cpu_to_be64(authlen * 8);
	lengths.b = cpu_to_be64(cryptlen * 8);

	crypto_gcm_ghash_flush(ctx);
	crypto_xor(buf, (u8 *)&lengths, 16);
	gf128mul_4k_lle((be128 *)buf, ctx->gf128);
	crypto_xor(dst, buf, 16);
}

static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
{
	struct crypto_gcm_setkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;

	result->err = err;
	complete(&result->completion);
}

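/*
 * Key the CTR cipher and derive the hash subkey H = E(K, 0^128) by
 * encrypting one block of zeroes with a zeroed counter (data is
 * kzalloc'ed, so data->iv starts out as zero bytes).  The gf128mul 4k
 * table for multiplication by H is then precomputed from the result.
 */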
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ablkcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[8];

		struct crypto_gcm_setkey_result result;

		struct scatterlist sg[1];
		struct ablkcipher_request req;
	} *data;
	int err;

	crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				   CRYPTO_TFM_REQ_MASK);

	err = crypto_ablkcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
				       CRYPTO_TFM_RES_MASK);

	data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	init_completion(&data->result.completion);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	ablkcipher_request_set_tfm(&data->req, ctr);
	ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						    CRYPTO_TFM_REQ_MAY_BACKLOG,
					crypto_gcm_setkey_done,
					&data->result);
	ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
				     sizeof(data->hash), data->iv);

	err = crypto_ablkcipher_encrypt(&data->req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		err = wait_for_completion_interruptible(
			&data->result.completion);
		if (!err)
			err = data->result.err;
	}

	if (err)
		goto out;

	if (ctx->gf128 != NULL)
		gf128mul_free_4k(ctx->gf128);

	ctx->gf128 = gf128mul_init_4k_lle(&data->hash);

	if (ctx->gf128 == NULL)
		err = -ENOMEM;

out:
	kfree(data);
	return err;
}

static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

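/*
 * Prepare the CTR request shared by encryption and decryption.  The
 * 96-bit IV is extended with a 32-bit big-endian counter starting at 1
 * (Y0), and a 16-byte block of zeroes (auth_tag) is chained in front of
 * the data so that the first keystream block, E(K, Y0), lands in
 * auth_tag.  The associated data is hashed here as well.
 */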
static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
				  struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u32 flags = req->base.tfm->crt_flags;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	struct scatterlist *dst;
	__be32 counter = cpu_to_be32(1);

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(req->iv + 12, &counter, 4);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
	ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
				     cryptlen + sizeof(pctx->auth_tag),
				     req->iv);

	crypto_gcm_ghash_init(ghash, flags, ctx->gf128);

	crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
	crypto_gcm_ghash_flush(ghash);
}

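/*
 * Hash the ciphertext, fold in the lengths, XOR the result into the
 * E(K, Y0) block already sitting in auth_tag and append the tag to the
 * destination scatterlist.
 */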
static int crypto_gcm_hash(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	u8 *auth_tag = pctx->auth_tag;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;

	crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
	crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
				   auth_tag);

	scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}

static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_hash(req);

	aead_request_complete(req, err);
}

static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	int err;

	crypto_gcm_init_crypt(abreq, req, req->cryptlen);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					crypto_gcm_encrypt_done, req);

	err = crypto_ablkcipher_encrypt(abreq);
	if (err)
		return err;

	return crypto_gcm_hash(req);
}

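/*
 * Recompute the tag over the received ciphertext and compare it with
 * the tag carried at the end of req->src.
 */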
static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);

	scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
	return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}

static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
	unsigned int cryptlen = req->cryptlen;
	unsigned int authsize = crypto_aead_authsize(aead);
	int err;

	if (cryptlen < authsize)
		return -EINVAL;
	cryptlen -= authsize;

	crypto_gcm_init_crypt(abreq, req, cryptlen);
	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
					crypto_gcm_decrypt_done, req);

	crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);

	err = crypto_ablkcipher_decrypt(abreq);
	if (err)
		return err;

	return crypto_gcm_verify(req);
}

static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ctr;
	unsigned long align;
	int err;

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		return err;

	ctx->ctr = ctr;
	ctx->gf128 = NULL;

	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
				sizeof(struct crypto_gcm_req_priv_ctx) +
				crypto_ablkcipher_reqsize(ctr);

	return 0;
}

static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->gf128 != NULL)
		gf128mul_free_4k(ctx->gf128);

	crypto_free_ablkcipher(ctx->ctr);
}

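/*
 * Construct a gcm/gcm_base instance around a CTR mode spawn.  The spawn
 * must look like counter mode over a 128-bit block cipher: a 16-byte IV
 * (the counter block) and a block size of 1.
 */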
static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
						       const char *full_name,
						       const char *ctr_name)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *ctr;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	ctx = crypto_instance_ctx(inst);
	crypto_set_skcipher_spawn(&ctx->ctr, inst);
	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_free_inst;

	ctr = crypto_skcipher_spawn_alg(&ctx->ctr);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (ctr->cra_ablkcipher.ivsize != 16)
		goto out_put_ctr;

	/* Not a stream cipher? */
	if (ctr->cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s)", ctr->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = ctr->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
	inst->alg.cra_type = &crypto_aead_type;
	inst->alg.cra_aead.ivsize = 16;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.cra_init = crypto_gcm_init_tfm;
	inst->alg.cra_exit = crypto_gcm_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_gcm_setkey;
	inst->alg.cra_aead.setauthsize = crypto_gcm_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
	inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;

out:
	return inst;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
{
	int err;
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(cipher_name);
	if (IS_ERR(cipher_name))
		return ERR_PTR(err);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_gcm_alloc_common(tb, full_name, ctr_name);
}

static void crypto_gcm_free(struct crypto_instance *inst)
{
	struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

static struct crypto_template crypto_gcm_tmpl = {
	.name = "gcm",
	.alloc = crypto_gcm_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};

static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
{
	int err;
	const char *ctr_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(ctr_name);
	if (IS_ERR(ctr_name))
		return ERR_PTR(err);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)",
		     ctr_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-ENAMETOOLONG);

	return crypto_gcm_alloc_common(tb, full_name, ctr_name);
}

static struct crypto_template crypto_gcm_base_tmpl = {
	.name = "gcm_base",
	.alloc = crypto_gcm_base_alloc,
	.free = crypto_gcm_free,
	.module = THIS_MODULE,
};

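/*
 * RFC 4106 (GCM in IPsec ESP): the last four bytes of the key are the
 * salt that forms the leading third of the GCM nonce; the rest is the
 * key of the underlying GCM transform.
 */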
static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}

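/*
 * Build the inner GCM request.  The 12-byte GCM nonce is the 4-byte
 * salt from setkey followed by the 8-byte per-request IV.
 */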
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
	aead_request_set_assoc(subreq, req->assoc, req->assoclen);

	return subreq;
}

static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4106_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = sizeof(struct aead_request) +
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +
				align + 16;

	return 0;
}

static void crypto_rfc4106_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct crypto_alg *alg;
	const char *gcm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	gcm_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(gcm_name);
	if (IS_ERR(gcm_name))
		return ERR_PTR(err);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, inst);
	err = crypto_grab_aead(spawn, gcm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_aead_spawn_alg(spawn);

	err = -EINVAL;

	/* The underlying GCM implementation must use a 16-byte IV. */
	if (alg->cra_aead.ivsize != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_nivaead_type;

	inst->alg.cra_aead.ivsize = 8;
	inst->alg.cra_aead.maxauthsize = 16;

	inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.cra_init = crypto_rfc4106_init_tfm;
	inst->alg.cra_exit = crypto_rfc4106_exit_tfm;

	inst->alg.cra_aead.setkey = crypto_rfc4106_setkey;
	inst->alg.cra_aead.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_rfc4106_encrypt;
	inst->alg.cra_aead.decrypt = crypto_rfc4106_decrypt;

	inst->alg.cra_aead.geniv = "seqiv";

out:
	return inst;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

769 
770 static void crypto_rfc4106_free(struct crypto_instance *inst)
771 {
772 	crypto_drop_spawn(crypto_instance_ctx(inst));
773 	kfree(inst);
774 }
775 
776 static struct crypto_template crypto_rfc4106_tmpl = {
777 	.name = "rfc4106",
778 	.alloc = crypto_rfc4106_alloc,
779 	.free = crypto_rfc4106_free,
780 	.module = THIS_MODULE,
781 };
782 
783 static int __init crypto_gcm_module_init(void)
784 {
785 	int err;
786 
787 	err = crypto_register_template(&crypto_gcm_base_tmpl);
788 	if (err)
789 		goto out;
790 
791 	err = crypto_register_template(&crypto_gcm_tmpl);
792 	if (err)
793 		goto out_undo_base;
794 
795 	err = crypto_register_template(&crypto_rfc4106_tmpl);
796 	if (err)
797 		goto out_undo_gcm;
798 
799 out:
800 	return err;
801 
802 out_undo_gcm:
803 	crypto_unregister_template(&crypto_gcm_tmpl);
804 out_undo_base:
805 	crypto_unregister_template(&crypto_gcm_base_tmpl);
806 	goto out;
807 }
808 
809 static void __exit crypto_gcm_module_exit(void)
810 {
811 	crypto_unregister_template(&crypto_rfc4106_tmpl);
812 	crypto_unregister_template(&crypto_gcm_tmpl);
813 	crypto_unregister_template(&crypto_gcm_base_tmpl);
814 }
815 
816 module_init(crypto_gcm_module_init);
817 module_exit(crypto_gcm_module_exit);
818 
819 MODULE_LICENSE("GPL");
820 MODULE_DESCRIPTION("Galois/Counter Mode");
821 MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
822 MODULE_ALIAS("gcm_base");
823 MODULE_ALIAS("rfc4106");
824