// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 flags;
	struct scatterlist src[3];
	struct scatterlist dst[3];
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	};
};

struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
	unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

/* Encode msglen as a big-endian integer into a csize-byte field. */
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err = 0;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
				    CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(mac, key, keylen);
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
				    CRYPTO_TFM_RES_MASK);

out:
	return err;
}

static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}
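/*
 * Worked example for the B_0 flags byte assembled in format_input()
 * above (RFC 3610 section 2.2).  The flags octet is
 *
 *	64 * Adata + 8 * M' + L'
 *
 * where M' = (authsize - 2) / 2 and L' = L - 1 is supplied by the
 * caller in iv[0].  For an 8-byte tag, a 2-byte length field
 * (iv[0] == 1) and non-empty associated data this gives
 * 64 + 8 * 3 + 1 = 0x59.
 */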
static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}

static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct ahash_request *ahreq = &pctx->ahreq;
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		if (plain)
			sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst,
					 req->assoclen + req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}
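/*
 * crypto_ccm_init_crypt() below prepares the CTR counter blocks: the
 * A_i blocks reuse the caller-supplied nonce, and both RFC 3610 and
 * NIST SP 800-38C require counter value zero (A_0) for encrypting the
 * auth tag.  Zeroing the trailing iv[0] + 1 bytes clears exactly the
 * L-byte counter field, so the first keystream block masks the
 * CBC-MAC and A_1, A_2, ... encrypt the payload.
 */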
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: RFC 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}

static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}
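/*
 * Decryption runs the same machinery in reverse: the transmitted tag
 * is copied out of src, the payload (prefixed by the tag slot in the
 * first keystream block) is run through CTR, and the CBC-MAC is then
 * recomputed over the recovered plaintext.  The comparison uses
 * crypto_memneq() so that tag verification does not leak timing
 * information.
 */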
static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}

static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		max(crypto_ahash_reqsize(mac), crypto_skcipher_reqsize(ctr)));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->mac);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_ahash(&ctx->mac);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}
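/*
 * Template construction: ccm(X) is assembled from ctr(X) and
 * cbcmac(X).  crypto_ccm_create_common() below enforces the CCM
 * requirements: the MAC must be a cbcmac(...) with a 16-byte digest,
 * the cipher must be genuine CTR mode (16-byte IV, stream-cipher
 * block size of 1), and both spawns must wrap the same underlying
 * block cipher.
 */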
static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *mac_alg;
	struct hash_alg_common *mac;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
				  CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_AHASH_MASK |
				  CRYPTO_ALG_ASYNC);
	if (IS_ERR(mac_alg))
		return PTR_ERR(mac_alg);

	mac = __crypto_hash_alg_common(mac_alg);
	err = -EINVAL;
	if (strncmp(mac->base.cra_name, "cbcmac(", 7) != 0 ||
	    mac->digestsize != 16)
		goto out_put_mac;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_mac;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ictx->mac, mac,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_mac;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* ctr and cbcmac must use the same underlying block cipher. */
	if (strcmp(ctr->base.cra_name + 4, mac->base.cra_name + 7) != 0)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	/* the closing ')' is supplied by the ctr cra_name, e.g.
	 * "ctr(aes)" + 4 == "aes)", hence the unbalanced "ccm(%s".
	 */
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ccm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_mac:
	crypto_mod_put(mac_alg);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
	crypto_drop_ahash(&ictx->mac);
err_free_inst:
	kfree(inst);
	goto out_put_mac;
}

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char mac_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}
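/*
 * A minimal usage sketch (not part of this file; error handling
 * elided, and the 16-byte key is a placeholder): a "ccm(aes)" request
 * resolves to the ccm_base(ctr(aes),cbcmac(aes)) instance built above.
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 8);
 *	...
 *	crypto_free_aead(tfm);
 */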
static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *mac_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	mac_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(mac_name))
		return PTR_ERR(mac_name);

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}

static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	/* the last three key bytes are the RFC 4309 salt (implicit nonce) */
	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}
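/*
 * RFC 4309 pins the CCM parameters for ESP: L = 4 (so L' = 3) and an
 * 11-byte nonce built from the 3 salt bytes taken from the tail of
 * the key plus the 8-byte per-packet IV, which is what
 * crypto_rfc4309_crypt() below assembles into the 16-byte CCM IV.
 */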
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	/* 8- or 12-byte ESP header plus the 8-byte IV carried in the AAD */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}
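/*
 * rfc4309(...) may only wrap a CCM-style AEAD: the create function
 * below mirrors the ccm_base checks by insisting on a 16-byte IV and
 * a block size of 1 before renaming and re-registering the instance.
 */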
static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
				       const u8 *inkey, unsigned int keylen)
{
	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

	return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_digestsize(pdesc->tfm);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

	ctx->len = 0;
	memset(dg, 0, bs);

	return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	while (len > 0) {
		unsigned int l = min(len, bs - ctx->len);

		crypto_xor(dg + ctx->len, p, l);
		ctx->len += l;
		len -= l;
		p += l;

		if (ctx->len == bs) {
			crypto_cipher_encrypt_one(tfm, dg, dg);
			ctx->len = 0;
		}
	}

	return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	if (ctx->len)
		crypto_cipher_encrypt_one(tfm, dg, dg);

	memcpy(out, dg, bs);
	return 0;
}
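/*
 * The update/final routines above implement the plain CBC-MAC
 * recurrence over cipher-block-sized chunks:
 *
 *	dg_0 = 0,  dg_i = E_K(dg_{i-1} XOR M_i),  tag = dg_n
 *
 * The running digest dg is stored at the tail of the shash descriptor
 * context (descsize - digestsize, as laid out by cbcmac_create()
 * below); a partial final block is XORed in and encrypted as-is, the
 * zero-padding of the input being the responsibility of ccm's
 * crypto_ccm_auth().
 */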
static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("cbcmac", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = 1;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
				   alg->cra_alignmask + 1) +
			     alg->cra_blocksize;

	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.final = crypto_cbcmac_digest_final;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	err = shash_register_instance(tmpl, inst);

out_free_inst:
	if (err)
		shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_ccm_tmpls[] = {
	{
		.name = "cbcmac",
		.create = cbcmac_create,
		.free = shash_free_instance,
		.module = THIS_MODULE,
	}, {
		.name = "ccm_base",
		.create = crypto_ccm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "ccm",
		.create = crypto_ccm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4309",
		.create = crypto_rfc4309_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ccm_module_init(void)
{
	return crypto_register_templates(crypto_ccm_tmpls,
					 ARRAY_SIZE(crypto_ccm_tmpls));
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_templates(crypto_ccm_tmpls,
				    ARRAY_SIZE(crypto_ccm_tmpls));
}

subsys_initcall(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");
MODULE_ALIAS_CRYPTO("cbcmac");