// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API.  It
 * differs from shash (synchronous hash) in that ahash supports asynchronous
 * operations, and it hashes data from scatterlists instead of virtually
 * addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms.  The
 * shash API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <linux/string_choices.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

struct crypto_hash_walk {
	const char *data;

	unsigned int offset;
	unsigned int flags;

	struct page *pg;
	unsigned int entrylen;

	unsigned int total;
	struct scatterlist *sg;
};

static int ahash_def_finup(struct ahash_request *req);

static inline bool crypto_ahash_block_only(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_BLOCK_ONLY;
}

static inline bool crypto_ahash_final_nonzero(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_FINAL_NONZERO;
}

static inline bool crypto_ahash_need_fallback(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_ALG_NEED_FALLBACK;
}

static inline void ahash_op_done(void *data, int err,
				 int (*finish)(struct ahash_request *, int))
{
	struct ahash_request *areq = data;
	crypto_completion_t compl;

	compl = areq->saved_complete;
	data = areq->saved_data;
	if (err == -EINPROGRESS)
		goto out;

	areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	err = finish(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	compl(data, err);
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = nth_page(sg_page(walk->sg), (walk->offset >> PAGE_SHIFT));
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

static int crypto_hash_walk_first(struct ahash_request *req,
				  struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;
	walk->entrylen = 0;

	if (!walk->total)
		return 0;

	walk->flags = req->base.flags;

	if (ahash_request_isvirt(req)) {
		walk->data = req->svirt;
		walk->total = 0;
		return req->nbytes;
	}

	walk->sg = req->src;

	return hash_walk_new_entry(walk);
}
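/*
 * Unmap the current chunk and advance the walk.  Returns the size of the
 * next chunk to hash, zero once the walk is complete, or the negative
 * error passed in via @err.  Walks over virtually addressed data have
 * nothing to unmap or advance and simply return @err.
 */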
static int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	if (walk->flags & CRYPTO_AHASH_REQ_VIRT)
		return err;

	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}

static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
{
	return !(walk->entrylen | walk->total);
}

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
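/*
 * Hash the remaining request data into @desc and write out the digest.
 * The final chunk is hashed with crypto_shash_finup() so that the last
 * update and the final round happen in a single pass.
 */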
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	struct page *page;
	const u8 *data;
	int err;

	data = req->svirt;
	if (!nbytes || ahash_request_isvirt(req))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	sg = req->src;
	if (nbytes > sg->length)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	page = sg_page(sg);
	offset = sg->offset;
	data = lowmem_page_address(page) + offset;
	if (!IS_ENABLED(CONFIG_HIGHMEM))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	page = nth_page(page, offset >> PAGE_SHIFT);
	offset = offset_in_page(offset);

	if (nbytes > (unsigned int)PAGE_SIZE - offset)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	data = kmap_local_page(page);
	err = crypto_shash_digest(desc, data + offset, nbytes,
				  req->result);
	kunmap_local(data);
	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	return 0;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (!err && crypto_ahash_need_fallback(tfm))
			err = crypto_ahash_setkey(crypto_ahash_fb(tfm),
						  key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
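/*
 * Invoke the operation that @op points at, bouncing virtual-address
 * requests to the software fallback tfm when the driver cannot handle
 * them itself.  @op points into struct ahash_alg so the specific
 * operation can be identified: digest runs entirely on the fallback,
 * while update/finup export the hash state to the fallback first and,
 * for update, import the resulting state back into the driver tfm.
 */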
static int ahash_do_req_chain(struct ahash_request *req,
			      int (*const *op)(struct ahash_request *req))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	if (crypto_ahash_req_virt(tfm) || !ahash_request_isvirt(req))
		return (*op)(req);

	if (crypto_ahash_statesize(tfm) > HASH_MAX_STATESIZE)
		return -ENOSYS;

	{
		u8 state[HASH_MAX_STATESIZE];

		if (op == &crypto_ahash_alg(tfm)->digest) {
			ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
			err = crypto_ahash_digest(req);
			goto out_no_state;
		}

		err = crypto_ahash_export(req, state);
		ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
		err = err ?: crypto_ahash_import(req, state);

		if (op == &crypto_ahash_alg(tfm)->finup) {
			err = err ?: crypto_ahash_finup(req);
			goto out_no_state;
		}

		err = err ?:
		      crypto_ahash_update(req) ?:
		      crypto_ahash_export(req, state);

		ahash_request_set_tfm(req, tfm);
		return err ?: crypto_ahash_import(req, state);

out_no_state:
		ahash_request_set_tfm(req, tfm);
		return err;
	}
}

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_block_only(tfm)) {
		u8 *buf = ahash_request_ctx(req);

		buf += crypto_ahash_reqsize(tfm) - 1;
		*buf = 0;
	}
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

static void ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
	req->saved_complete = req->base.complete;
	req->saved_data = req->base.data;
	req->base.complete = cplt;
	req->base.data = req;
}

static void ahash_restore_req(struct ahash_request *req)
{
	req->base.complete = req->saved_complete;
	req->base.data = req->saved_data;
}
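/*
 * Block-only algorithms (CRYPTO_AHASH_ALG_BLOCK_ONLY) can only consume
 * whole blocks, so a partial block is buffered in the request context:
 * the last byte of the context holds the number of buffered bytes and
 * the blocksize bytes before it hold the data.  Algorithms flagged
 * CRYPTO_AHASH_ALG_FINAL_NONZERO additionally keep at least one byte
 * buffered so that the final operation never sees empty input.
 * ahash_update_finish() refills this buffer once the algorithm has
 * consumed the whole blocks handed to it.
 */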
static int ahash_update_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen;
	u8 *buf;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		req->src = req->sg_head + 1;
		if (sg_is_chain(req->src))
			req->src = sg_chain_ptr(req->src);
	}

	req->nbytes += nonzero - blen;

	blen = err < 0 ? 0 : err + nonzero;
	if (ahash_request_isvirt(req))
		memcpy(buf, req->svirt + req->nbytes - blen, blen);
	else
		memcpy_from_sglist(buf, req->src, req->nbytes - blen, blen);
	*blenp = blen;

	ahash_restore_req(req);

	return err;
}

static void ahash_update_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_update_finish);
}

int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen + req->nbytes < bs + nonzero) {
		if (ahash_request_isvirt(req))
			memcpy(buf + blen, req->svirt, req->nbytes);
		else
			memcpy_from_sglist(buf + blen, req->src, 0,
					   req->nbytes);

		*blenp += req->nbytes;
		return 0;
	}

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}
	req->nbytes -= nonzero;

	ahash_save_req(req, ahash_update_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_update_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);
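/*
 * Undo the scatterlist stitching that crypto_ahash_finup() performed to
 * prepend a buffered partial block, restoring the caller's req->src and
 * req->nbytes before the request completes.
 */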
static int ahash_finup_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	u8 *blenp = ahash_request_ctx(req);
	int blen;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;

	if (blen) {
		if (sg_is_last(req->src))
			req->src = NULL;
		else {
			req->src = req->sg_head + 1;
			if (sg_is_chain(req->src))
				req->src = sg_chain_ptr(req->src);
		}
		req->nbytes -= blen;
	}

	ahash_restore_req(req);

	return err;
}

static void ahash_finup_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_finup_finish);
}

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_alg(tfm)->finup)
		return ahash_def_finup(req);
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (!req->src)
			sg_mark_end(req->sg_head);
		else if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}

	ahash_save_req(req, ahash_finup_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_finup_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq);
	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_final(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	ahash_op_done(data, err, ahash_def_finup_finish1);
}

static int ahash_def_finup(struct ahash_request *req)
{
	int err;

	ahash_save_req(req, ahash_def_finup_done1);

	err = crypto_ahash_update(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export_core(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export_core(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export_core(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export_core);

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	if (crypto_ahash_block_only(tfm)) {
		unsigned int plen = crypto_ahash_blocksize(tfm) + 1;
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		unsigned int ss = crypto_ahash_statesize(tfm);
		u8 *buf = ahash_request_ctx(req);

		memcpy(out + ss - plen, buf + reqsize - plen, plen);
	}
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import_core(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import_core(prepare_shash_desc(req, tfm),
						in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import_core(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import_core);
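/*
 * Import a state previously produced by crypto_ahash_export().  For
 * block-only algorithms the buffered-byte count at the end of the
 * request context is cleared first so that a stale value cannot leak
 * into the imported state.
 */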
int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (crypto_ahash_block_only(tfm)) {
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		u8 *buf = ahash_request_ctx(req);

		buf[reqsize - 1] = 0;
	}
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);

	if (crypto_ahash_need_fallback(hash))
		crypto_free_ahash(crypto_ahash_fb(hash));
}
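/*
 * Initialise an ahash transform.  shash-backed algorithms are bound to
 * an underlying shash tfm; algorithms flagged CRYPTO_ALG_NEED_FALLBACK
 * get a synchronous, virtual-address-capable fallback tfm allocated
 * before the driver's own init callback runs.
 */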
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);
	struct crypto_ahash *fb = NULL;
	int err;

	crypto_ahash_set_statesize(hash, alg->halg.statesize);
	crypto_ahash_set_reqsize(hash, crypto_tfm_alg_reqsize(tfm));

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_alloc_ahash(crypto_ahash_alg_name(hash),
					CRYPTO_ALG_REQ_VIRT,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_REQ_VIRT |
					CRYPTO_AHASH_ALG_NO_EXPORT_CORE);
		if (IS_ERR(fb))
			return PTR_ERR(fb);

		tfm->fb = crypto_ahash_tfm(fb);
	}

	ahash_set_needkey(hash, alg);

	tfm->exit = crypto_ahash_exit_tfm;

	if (alg->init_tfm)
		err = alg->init_tfm(hash);
	else if (tfm->__crt_alg->cra_init)
		err = tfm->__crt_alg->cra_init(tfm);
	else
		return 0;

	if (err)
		goto out_free_sync_hash;

	if (!ahash_is_async(hash) && crypto_ahash_reqsize(hash) >
				     MAX_SYNC_HASH_REQSIZE)
		goto out_exit_tfm;

	BUILD_BUG_ON(HASH_MAX_DESCSIZE > MAX_SYNC_HASH_REQSIZE);
	if (crypto_ahash_reqsize(hash) < HASH_MAX_DESCSIZE)
		crypto_ahash_set_reqsize(hash, HASH_MAX_DESCSIZE);

	return 0;

out_exit_tfm:
	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);
	err = -EINVAL;
out_free_sync_hash:
	crypto_free_ahash(fb);
	return err;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
	.algsize = offsetof(struct ahash_alg, halg.base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *fb = NULL;
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		crypto_ahash_tfm(nhash)->exit = crypto_exit_ahash_using_shash;
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_clone_ahash(crypto_ahash_fb(hash));
		err = PTR_ERR(fb);
		if (IS_ERR(fb))
			goto out_free_nhash;

		crypto_ahash_tfm(nhash)->fb = crypto_ahash_tfm(fb);
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_fb;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_fb;

	crypto_ahash_tfm(nhash)->exit = crypto_ahash_exit_tfm;

	return nhash;

out_free_fb:
	crypto_free_ahash(fb);
out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);
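/*
 * Default export_core/import_core callbacks for algorithms that do not
 * implement partial-block state transfer.
 */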
static int ahash_default_export_core(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}

static int ahash_default_import_core(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	if (base->cra_reqsize && base->cra_reqsize < alg->halg.statesize)
		return -EINVAL;

	if (!(base->cra_flags & CRYPTO_ALG_ASYNC) &&
	    base->cra_reqsize > MAX_SYNC_HASH_REQSIZE)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if ((base->cra_flags ^ CRYPTO_ALG_REQ_VIRT) &
	    (CRYPTO_ALG_ASYNC | CRYPTO_ALG_REQ_VIRT))
		base->cra_flags |= CRYPTO_ALG_NEED_FALLBACK;

	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) {
		BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256);
		if (!alg->finup)
			return -EINVAL;

		base->cra_reqsize += base->cra_blocksize + 1;
		alg->halg.statesize += base->cra_blocksize + 1;
		alg->export_core = alg->export;
		alg->import_core = alg->import;
	} else if (!alg->export_core || !alg->import_core) {
		alg->export_core = ahash_default_export_core;
		alg->import_core = ahash_default_import_core;
		base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE;
	}

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

void ahash_request_free(struct ahash_request *req)
{
	if (unlikely(!req))
		return;

	if (!ahash_req_on_stack(req)) {
		kfree(req);
		return;
	}

	ahash_request_zero(req);
}
EXPORT_SYMBOL_GPL(ahash_request_free);
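/*
 * One-shot helper: synchronously hash a virtually addressed buffer using
 * an on-stack request on the tfm's fallback, for callers that do not
 * need the asynchronous interface.
 */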
int crypto_hash_digest(struct crypto_ahash *tfm, const u8 *data,
		       unsigned int len, u8 *out)
{
	HASH_REQUEST_ON_STACK(req, crypto_ahash_fb(tfm));
	int err;

	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_virt(req, data, out, len);
	err = crypto_ahash_digest(req);

	ahash_request_zero(req);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_hash_digest);

void ahash_free_singlespawn_instance(struct ahash_instance *inst)
{
	crypto_drop_spawn(ahash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(ahash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");