xref: /linux/crypto/ccm.c (revision 25489a4f556414445d342951615178368ee45cde)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * CCM: Counter with CBC-MAC
4  *
5  * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
6  */
7 
8 #include <crypto/internal/aead.h>
9 #include <crypto/internal/cipher.h>
10 #include <crypto/internal/hash.h>
11 #include <crypto/internal/skcipher.h>
12 #include <crypto/scatterwalk.h>
13 #include <crypto/utils.h>
14 #include <linux/err.h>
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/slab.h>
18 #include <linux/string.h>
19 
/* Per-instance context: the two spawned sub-algorithms CCM is built from. */
struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;	/* "ctr(cipher)" keystream part */
	struct crypto_ahash_spawn mac;		/* "cbcmac(cipher)" MAC part */
};
24 
/* Per-tfm context: the instantiated CBC-MAC and CTR sub-transforms. */
struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};
29 
/* rfc4309 wrapper context: the inner ccm(...) aead plus the implicit
 * 3-byte nonce taken from the tail of the key (see crypto_rfc4309_setkey()).
 */
struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};
34 
/* rfc4309 per-request context: scatterlists used to splice the copied
 * associated data in front of the caller's src/dst, plus the subrequest
 * handed to the inner ccm(...) transform.
 */
struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};
40 
/* CCM per-request context, stored (aligned) in the aead request context. */
struct crypto_ccm_req_priv_ctx {
	u8 odata[16];		/* B_0 block, then the computed CBC-MAC */
	u8 idata[16];		/* encoded AAD length / pad / IV scratch */
	u8 auth_tag[16];	/* tag lifted from src during decryption */
	u32 flags;		/* request flags forwarded to sub-requests */
	struct scatterlist src[3];
	struct scatterlist dst[3];
	union {
		/* only one sub-request is ever in flight at a time */
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	};
};
53 
/* cbcmac tfm context: just the underlying block cipher. */
struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};
57 
58 static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
59 	struct aead_request *req)
60 {
61 	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
62 
63 	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
64 }
65 
66 static int set_msg_len(u8 *block, unsigned int msglen, int csize)
67 {
68 	__be32 data;
69 
70 	memset(block, 0, csize);
71 	block += csize;
72 
73 	if (csize >= 4)
74 		csize = 4;
75 	else if (msglen > (1 << (8 * csize)))
76 		return -EOVERFLOW;
77 
78 	data = cpu_to_be32(msglen);
79 	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
80 
81 	return 0;
82 }
83 
84 static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
85 			     unsigned int keylen)
86 {
87 	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
88 	struct crypto_skcipher *ctr = ctx->ctr;
89 	struct crypto_ahash *mac = ctx->mac;
90 	int err;
91 
92 	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
93 	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
94 				       CRYPTO_TFM_REQ_MASK);
95 	err = crypto_skcipher_setkey(ctr, key, keylen);
96 	if (err)
97 		return err;
98 
99 	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
100 	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
101 				    CRYPTO_TFM_REQ_MASK);
102 	return crypto_ahash_setkey(mac, key, keylen);
103 }
104 
105 static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
106 				  unsigned int authsize)
107 {
108 	switch (authsize) {
109 	case 4:
110 	case 6:
111 	case 8:
112 	case 10:
113 	case 12:
114 	case 14:
115 	case 16:
116 		break;
117 	default:
118 		return -EINVAL;
119 	}
120 
121 	return 0;
122 }
123 
124 static int format_input(u8 *info, struct aead_request *req,
125 			unsigned int cryptlen)
126 {
127 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
128 	unsigned int lp = req->iv[0];
129 	unsigned int l = lp + 1;
130 	unsigned int m;
131 
132 	m = crypto_aead_authsize(aead);
133 
134 	memcpy(info, req->iv, 16);
135 
136 	/* format control info per RFC 3610 and
137 	 * NIST Special Publication 800-38C
138 	 */
139 	*info |= (8 * ((m - 2) / 2));
140 	if (req->assoclen)
141 		*info |= 64;
142 
143 	return set_msg_len(info + 16 - l, cryptlen, l);
144 }
145 
146 static int format_adata(u8 *adata, unsigned int a)
147 {
148 	int len = 0;
149 
150 	/* add control info for associated data
151 	 * RFC 3610 and NIST Special Publication 800-38C
152 	 */
153 	if (a < 65280) {
154 		*(__be16 *)adata = cpu_to_be16(a);
155 		len = 2;
156 	} else  {
157 		*(__be16 *)adata = cpu_to_be16(0xfffe);
158 		*(__be32 *)&adata[2] = cpu_to_be32(a);
159 		len = 6;
160 	}
161 
162 	return len;
163 }
164 
/*
 * Compute the CCM CBC-MAC over B_0, the length-prefixed associated
 * data, and 'plain' ('cryptlen' bytes), each zero-padded to a multiple
 * of the 16-byte block size.  The 16-byte MAC ends up in pctx->odata.
 * Returns 0 or a negative errno.
 */
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct ahash_request *ahreq = &pctx->ahreq;
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	/* hash B_0 first, then the encoded AAD chained from req->src */
	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {	/* header not already block-aligned */
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		if (plain)
			sg_chain(sg, 2, plain);
		/* prepend the zero pad to the text and finish the MAC */
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}
223 
224 static void crypto_ccm_encrypt_done(void *data, int err)
225 {
226 	struct aead_request *req = data;
227 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
228 	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
229 	u8 *odata = pctx->odata;
230 
231 	if (!err)
232 		scatterwalk_map_and_copy(odata, req->dst,
233 					 req->assoclen + req->cryptlen,
234 					 crypto_aead_authsize(aead), 1);
235 	aead_request_complete(req, err);
236 }
237 
238 static inline int crypto_ccm_check_iv(const u8 *iv)
239 {
240 	/* 2 <= L <= 8, so 1 <= L' <= 7. */
241 	if (1 > iv[0] || iv[0] > 7)
242 		return -EINVAL;
243 
244 	return 0;
245 }
246 
/*
 * Common encrypt/decrypt setup: validate the IV, zero its counter
 * field, and build scatterlists that place the 16-byte 'tag' buffer
 * immediately before the message portion of src (and dst when they
 * differ).  That tag slot lines up with counter block 0, which CTR
 * mode uses to encrypt/decrypt the authentication tag.
 */
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	/* [tag | message] view of the source, skipping the AAD */
	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
281 
/*
 * CCM encryption: compute the CBC-MAC over the plaintext first, then
 * CTR-encrypt [mac | plaintext] in one pass (counter block 0 turns the
 * MAC into the auth tag) and append the tag to dst.
 */
static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	/* CBC-MAC of AAD + plaintext; result lands in odata */
	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;	/* async completion copies the tag instead */

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}
319 
/*
 * Completion callback for async CTR decryption: recompute the CBC-MAC
 * over the recovered plaintext and compare it with the decrypted tag.
 */
static void crypto_ccm_decrypt_done(void *data, int err)
{
	struct aead_request *req = data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	/* NOTE(review): flags are cleared so crypto_ccm_auth() issues the
	 * hash request without e.g. MAY_SLEEP — presumably because this
	 * callback can run in atomic context; confirm. */
	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}
340 
/*
 * CCM decryption: CTR-decrypt [tag | ciphertext] (counter block 0
 * recovers the CBC-MAC from the tag), recompute the MAC over the
 * plaintext, and compare.  Returns -EBADMSG on authentication failure.
 */
static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	/* lift the encrypted tag out of src; CTR block 0 then decrypts
	 * it in place in the authtag buffer */
	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	/* private IV copy: the skcipher may clobber its IV, but req->iv
	 * is still needed by crypto_ccm_auth()/format_input() */
	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;	/* includes -EINPROGRESS for async tfms */

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}
388 
389 static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
390 {
391 	struct aead_instance *inst = aead_alg_instance(tfm);
392 	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
393 	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
394 	struct crypto_ahash *mac;
395 	struct crypto_skcipher *ctr;
396 	unsigned long align;
397 	int err;
398 
399 	mac = crypto_spawn_ahash(&ictx->mac);
400 	if (IS_ERR(mac))
401 		return PTR_ERR(mac);
402 
403 	ctr = crypto_spawn_skcipher(&ictx->ctr);
404 	err = PTR_ERR(ctr);
405 	if (IS_ERR(ctr))
406 		goto err_free_mac;
407 
408 	ctx->mac = mac;
409 	ctx->ctr = ctr;
410 
411 	align = crypto_aead_alignmask(tfm);
412 	align &= ~(crypto_tfm_ctx_alignment() - 1);
413 	crypto_aead_set_reqsize(
414 		tfm,
415 		align + sizeof(struct crypto_ccm_req_priv_ctx) +
416 		max(crypto_ahash_reqsize(mac), crypto_skcipher_reqsize(ctr)));
417 
418 	return 0;
419 
420 err_free_mac:
421 	crypto_free_ahash(mac);
422 	return err;
423 }
424 
425 static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
426 {
427 	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
428 
429 	crypto_free_ahash(ctx->mac);
430 	crypto_free_skcipher(ctx->ctr);
431 }
432 
433 static void crypto_ccm_free(struct aead_instance *inst)
434 {
435 	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);
436 
437 	crypto_drop_ahash(&ctx->mac);
438 	crypto_drop_skcipher(&ctx->ctr);
439 	kfree(inst);
440 }
441 
/*
 * Instantiate "ccm(...)"/"ccm_base(...)": glue a CTR skcipher and a
 * cbcmac ahash into one AEAD instance, verifying that both wrap the
 * same cipher with 16-byte blocks.
 */
static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct skcipher_alg_common *ctr;
	u32 mask;
	struct aead_instance *inst;
	struct ccm_instance_ctx *ictx;
	struct hash_alg_common *mac;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = aead_instance_ctx(inst);

	/* masking CRYPTO_ALG_ASYNC requests a synchronous MAC */
	err = crypto_grab_ahash(&ictx->mac, aead_crypto_instance(inst),
				mac_name, 0, mask | CRYPTO_ALG_ASYNC);
	if (err)
		goto err_free_inst;
	mac = crypto_spawn_ahash_alg(&ictx->mac);

	/* the MAC must be a cbcmac with a 16-byte digest */
	err = -EINVAL;
	if (strncmp(mac->base.cra_name, "cbcmac(", 7) != 0 ||
	    mac->digestsize != 16)
		goto err_free_inst;

	err = crypto_grab_skcipher(&ictx->ctr, aead_crypto_instance(inst),
				   ctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	ctr = crypto_spawn_skcipher_alg_common(&ictx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    ctr->ivsize != 16 || ctr->base.cra_blocksize != 1)
		goto err_free_inst;

	/* ctr and cbcmac must use the same underlying block cipher. */
	if (strcmp(ctr->base.cra_name + 4, mac->base.cra_name + 7) != 0)
		goto err_free_inst;

	/* "ccm(%s" — the closing ')' comes from the ctr name's tail */
	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ccm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = ctr->chunksize;
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
		/* error label inside the if: the success path falls through */
err_free_inst:
		crypto_ccm_free(inst);
	}
	return err;
}
524 
525 static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
526 {
527 	const char *cipher_name;
528 	char ctr_name[CRYPTO_MAX_ALG_NAME];
529 	char mac_name[CRYPTO_MAX_ALG_NAME];
530 
531 	cipher_name = crypto_attr_alg_name(tb[1]);
532 	if (IS_ERR(cipher_name))
533 		return PTR_ERR(cipher_name);
534 
535 	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
536 		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
537 		return -ENAMETOOLONG;
538 
539 	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
540 		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
541 		return -ENAMETOOLONG;
542 
543 	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
544 }
545 
/* "ccm_base(ctr, mac)" template: both component names given explicitly. */
static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name = crypto_attr_alg_name(tb[1]);
	const char *mac_name;

	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	mac_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(mac_name))
		return PTR_ERR(mac_name);

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}
562 
563 static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
564 				 unsigned int keylen)
565 {
566 	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
567 	struct crypto_aead *child = ctx->child;
568 
569 	if (keylen < 3)
570 		return -EINVAL;
571 
572 	keylen -= 3;
573 	memcpy(ctx->nonce, key + keylen, 3);
574 
575 	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
576 	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
577 				     CRYPTO_TFM_REQ_MASK);
578 	return crypto_aead_setkey(child, key, keylen);
579 }
580 
581 static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
582 				      unsigned int authsize)
583 {
584 	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
585 
586 	switch (authsize) {
587 	case 8:
588 	case 12:
589 	case 16:
590 		break;
591 	default:
592 		return -EINVAL;
593 	}
594 
595 	return crypto_aead_setauthsize(ctx->child, authsize);
596 }
597 
/*
 * Build the inner ccm(...) subrequest for an rfc4309 request.  The
 * 16-byte CCM IV is assembled in per-request scratch space: flags
 * octet with L' = 3, the 3-byte salt from setkey, the caller's 8-byte
 * IV, and a counter field that crypto_ccm_init_crypt() later zeroes.
 * The real associated data (req->assoclen minus the trailing 8 IV
 * bytes) is copied into the scratch area after the IV and spliced in
 * front of src/dst via chained scatterlists.  Returns the prepared
 * subrequest.
 */
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	/* scratch lives after the child subrequest; 32 bytes reserved in
	 * crypto_rfc4309_init_tfm() */
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	/* copy the AAD (minus the 8 IV bytes) into scratch after the IV */
	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
641 
642 static int crypto_rfc4309_encrypt(struct aead_request *req)
643 {
644 	if (req->assoclen != 16 && req->assoclen != 20)
645 		return -EINVAL;
646 
647 	req = crypto_rfc4309_crypt(req);
648 
649 	return crypto_aead_encrypt(req);
650 }
651 
652 static int crypto_rfc4309_decrypt(struct aead_request *req)
653 {
654 	if (req->assoclen != 16 && req->assoclen != 20)
655 		return -EINVAL;
656 
657 	req = crypto_rfc4309_crypt(req);
658 
659 	return crypto_aead_decrypt(req);
660 }
661 
/* Instantiate the inner ccm(...) transform and size the request ctx. */
static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	/* wrapper ctx + child subrequest + an aligned 32-byte scratch
	 * area: 16-byte IV followed by up to 16 bytes of copied AAD
	 * (assoclen - 8 is 8 or 12), see crypto_rfc4309_crypt() */
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}
686 
687 static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
688 {
689 	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
690 
691 	crypto_free_aead(ctx->child);
692 }
693 
/* Drop the aead spawn, then free the instance. */
static void crypto_rfc4309_free(struct aead_instance *inst)
{
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);

	crypto_drop_aead(spawn);
	kfree(inst);
}
699 
/*
 * Instantiate "rfc4309(ccm(...))": the IPsec ESP variant of CCM with
 * an implicit 3-byte nonce from the key and an 8-byte per-request IV.
 */
static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	u32 mask;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	/* only the 8 explicit IV bytes are visible to callers */
	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
		/* error label inside the if: the success path falls through */
err_free_inst:
		crypto_rfc4309_free(inst);
	}
	return err;
}
771 
772 static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
773 				     const u8 *inkey, unsigned int keylen)
774 {
775 	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
776 
777 	return crypto_cipher_setkey(ctx->child, inkey, keylen);
778 }
779 
780 static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
781 {
782 	int bs = crypto_shash_digestsize(pdesc->tfm);
783 	u8 *dg = shash_desc_ctx(pdesc);
784 
785 	memset(dg, 0, bs);
786 	return 0;
787 }
788 
/*
 * Absorb input into the CBC-MAC state: dg = E_K(dg ^ block) per block.
 * Returns the number of trailing bytes (< block size) left unprocessed
 * for the core to buffer.  NOTE(review): the do/while assumes
 * len >= bs on entry — this relies on the block-only contract implied
 * by CRYPTO_AHASH_ALG_BLOCK_ONLY (set in cbcmac_create()); a first
 * iteration with len < bs would over-read.  Confirm against the shash
 * core.
 */
static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = shash_desc_ctx(pdesc);

	do {
		crypto_xor(dg, p, bs);
		crypto_cipher_encrypt_one(tfm, dg, dg);
		p += bs;
		len -= bs;
	} while (len >= bs);
	return len;
}
806 
/*
 * Finalize the CBC-MAC.  A nonzero len is one last (possibly short)
 * block: xoring only len bytes implicitly zero-pads it before the
 * final encryption.  With len == 0 the state is already the MAC of
 * all previously absorbed blocks and is emitted as-is.
 */
static int crypto_cbcmac_digest_finup(struct shash_desc *pdesc, const u8 *src,
				      unsigned int len, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = shash_desc_ctx(pdesc);

	if (len) {
		crypto_xor(dg, src, len);
		crypto_cipher_encrypt_one(tfm, out, dg);
		return 0;
	}
	memcpy(out, dg, bs);
	return 0;
}
824 
825 static int cbcmac_init_tfm(struct crypto_tfm *tfm)
826 {
827 	struct crypto_cipher *cipher;
828 	struct crypto_instance *inst = (void *)tfm->__crt_alg;
829 	struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
830 	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
831 
832 	cipher = crypto_spawn_cipher(spawn);
833 	if (IS_ERR(cipher))
834 		return PTR_ERR(cipher);
835 
836 	ctx->child = cipher;
837 
838 	return 0;
839 };
840 
841 static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
842 {
843 	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
844 	crypto_free_cipher(ctx->child);
845 }
846 
/*
 * Instantiate "cbcmac(cipher)": a keyed MAC whose digest size and
 * per-request state both equal the cipher's block size.
 */
static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;

	/* digest and desc state are both one cipher block */
	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = alg->cra_blocksize;

	/* NOTE(review): CRYPTO_AHASH_ALG_BLOCK_ONLY appears to make the
	 * hash core feed update() whole blocks and buffer any remainder
	 * itself, matching the do/while in crypto_cbcmac_digest_update()
	 * — confirm against the shash core. */
	inst->alg.base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY;
	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.finup = crypto_cbcmac_digest_finup;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
		/* error label inside the if: the success path falls through */
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}
899 
/* Templates provided by this module: the cbcmac helper hash plus the
 * ccm_base/ccm/rfc4309 AEAD constructions built on top of it. */
static struct crypto_template crypto_ccm_tmpls[] = {
	{
		.name = "cbcmac",
		.create = cbcmac_create,
		.module = THIS_MODULE,
	}, {
		.name = "ccm_base",
		.create = crypto_ccm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "ccm",
		.create = crypto_ccm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4309",
		.create = crypto_rfc4309_create,
		.module = THIS_MODULE,
	},
};
919 
/* Register all four templates on module load. */
static int __init crypto_ccm_module_init(void)
{
	return crypto_register_templates(crypto_ccm_tmpls,
					 ARRAY_SIZE(crypto_ccm_tmpls));
}
925 
/* Unregister all four templates on module unload. */
static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_templates(crypto_ccm_tmpls,
				    ARRAY_SIZE(crypto_ccm_tmpls));
}
931 
module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
/* aliases let request_module() resolve these template names here */
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");
MODULE_ALIAS_CRYPTO("cbcmac");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");
942