1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Asynchronous Cryptographic Hash operations. 4 * 5 * This is the implementation of the ahash (asynchronous hash) API. It differs 6 * from shash (synchronous hash) in that ahash supports asynchronous operations, 7 * and it hashes data from scatterlists instead of virtually addressed buffers. 8 * 9 * The ahash API provides access to both ahash and shash algorithms. The shash 10 * API only provides access to shash algorithms. 11 * 12 * Copyright (c) 2008 Loc Ho <lho@amcc.com> 13 */ 14 15 #include <crypto/scatterwalk.h> 16 #include <linux/cryptouser.h> 17 #include <linux/err.h> 18 #include <linux/kernel.h> 19 #include <linux/mm.h> 20 #include <linux/module.h> 21 #include <linux/scatterlist.h> 22 #include <linux/slab.h> 23 #include <linux/seq_file.h> 24 #include <linux/string.h> 25 #include <linux/string_choices.h> 26 #include <net/netlink.h> 27 28 #include "hash.h" 29 30 #define CRYPTO_ALG_TYPE_AHASH_MASK 0x0000000e 31 32 static int ahash_def_finup(struct ahash_request *req); 33 34 static inline bool crypto_ahash_block_only(struct crypto_ahash *tfm) 35 { 36 return crypto_ahash_alg(tfm)->halg.base.cra_flags & 37 CRYPTO_AHASH_ALG_BLOCK_ONLY; 38 } 39 40 static inline bool crypto_ahash_final_nonzero(struct crypto_ahash *tfm) 41 { 42 return crypto_ahash_alg(tfm)->halg.base.cra_flags & 43 CRYPTO_AHASH_ALG_FINAL_NONZERO; 44 } 45 46 static inline bool crypto_ahash_need_fallback(struct crypto_ahash *tfm) 47 { 48 return crypto_ahash_alg(tfm)->halg.base.cra_flags & 49 CRYPTO_ALG_NEED_FALLBACK; 50 } 51 52 static inline void ahash_op_done(void *data, int err, 53 int (*finish)(struct ahash_request *, int)) 54 { 55 struct ahash_request *areq = data; 56 crypto_completion_t compl; 57 58 compl = areq->saved_complete; 59 data = areq->saved_data; 60 if (err == -EINPROGRESS) 61 goto out; 62 63 areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; 64 65 err = finish(areq, err); 66 if (err == -EINPROGRESS || err == -EBUSY) 67 return; 68 69 out: 70 compl(data, err); 71 } 72 73 static int hash_walk_next(struct crypto_hash_walk *walk) 74 { 75 unsigned int offset = walk->offset; 76 unsigned int nbytes = min(walk->entrylen, 77 ((unsigned int)(PAGE_SIZE)) - offset); 78 79 walk->data = kmap_local_page(walk->pg); 80 walk->data += offset; 81 walk->entrylen -= nbytes; 82 return nbytes; 83 } 84 85 static int hash_walk_new_entry(struct crypto_hash_walk *walk) 86 { 87 struct scatterlist *sg; 88 89 sg = walk->sg; 90 walk->offset = sg->offset; 91 walk->pg = nth_page(sg_page(walk->sg), (walk->offset >> PAGE_SHIFT)); 92 walk->offset = offset_in_page(walk->offset); 93 walk->entrylen = sg->length; 94 95 if (walk->entrylen > walk->total) 96 walk->entrylen = walk->total; 97 walk->total -= walk->entrylen; 98 99 return hash_walk_next(walk); 100 } 101 102 int crypto_hash_walk_first(struct ahash_request *req, 103 struct crypto_hash_walk *walk) 104 { 105 walk->total = req->nbytes; 106 walk->entrylen = 0; 107 108 if (!walk->total) 109 return 0; 110 111 walk->flags = req->base.flags; 112 113 if (ahash_request_isvirt(req)) { 114 walk->data = req->svirt; 115 walk->total = 0; 116 return req->nbytes; 117 } 118 119 walk->sg = req->src; 120 121 return hash_walk_new_entry(walk); 122 } 123 EXPORT_SYMBOL_GPL(crypto_hash_walk_first); 124 125 int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err) 126 { 127 if ((walk->flags & CRYPTO_AHASH_REQ_VIRT)) 128 return err; 129 130 walk->data -= walk->offset; 131 132 kunmap_local(walk->data); 133 crypto_yield(walk->flags); 134 135 if (err) 136 return err; 137 138 if (walk->entrylen) { 139 walk->offset = 0; 140 walk->pg++; 141 return hash_walk_next(walk); 142 } 143 144 if (!walk->total) 145 return 0; 146 147 walk->sg = sg_next(walk->sg); 148 149 return hash_walk_new_entry(walk); 150 } 151 EXPORT_SYMBOL_GPL(crypto_hash_walk_done); 152 153 /* 154 * For an ahash tfm that is using an shash algorithm (instead of an ahash 155 * algorithm), this returns the underlying shash tfm. 156 */ 157 static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm) 158 { 159 return *(struct crypto_shash **)crypto_ahash_ctx(tfm); 160 } 161 162 static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req, 163 struct crypto_ahash *tfm) 164 { 165 struct shash_desc *desc = ahash_request_ctx(req); 166 167 desc->tfm = ahash_to_shash(tfm); 168 return desc; 169 } 170 171 int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc) 172 { 173 struct crypto_hash_walk walk; 174 int nbytes; 175 176 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; 177 nbytes = crypto_hash_walk_done(&walk, nbytes)) 178 nbytes = crypto_shash_update(desc, walk.data, nbytes); 179 180 return nbytes; 181 } 182 EXPORT_SYMBOL_GPL(shash_ahash_update); 183 184 int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) 185 { 186 struct crypto_hash_walk walk; 187 int nbytes; 188 189 nbytes = crypto_hash_walk_first(req, &walk); 190 if (!nbytes) 191 return crypto_shash_final(desc, req->result); 192 193 do { 194 nbytes = crypto_hash_walk_last(&walk) ? 195 crypto_shash_finup(desc, walk.data, nbytes, 196 req->result) : 197 crypto_shash_update(desc, walk.data, nbytes); 198 nbytes = crypto_hash_walk_done(&walk, nbytes); 199 } while (nbytes > 0); 200 201 return nbytes; 202 } 203 EXPORT_SYMBOL_GPL(shash_ahash_finup); 204 205 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) 206 { 207 unsigned int nbytes = req->nbytes; 208 struct scatterlist *sg; 209 unsigned int offset; 210 struct page *page; 211 const u8 *data; 212 int err; 213 214 data = req->svirt; 215 if (!nbytes || ahash_request_isvirt(req)) 216 return crypto_shash_digest(desc, data, nbytes, req->result); 217 218 sg = req->src; 219 if (nbytes > sg->length) 220 return crypto_shash_init(desc) ?: 221 shash_ahash_finup(req, desc); 222 223 page = sg_page(sg); 224 offset = sg->offset; 225 data = lowmem_page_address(page) + offset; 226 if (!IS_ENABLED(CONFIG_HIGHMEM)) 227 return crypto_shash_digest(desc, data, nbytes, req->result); 228 229 page = nth_page(page, offset >> PAGE_SHIFT); 230 offset = offset_in_page(offset); 231 232 if (nbytes > (unsigned int)PAGE_SIZE - offset) 233 return crypto_shash_init(desc) ?: 234 shash_ahash_finup(req, desc); 235 236 data = kmap_local_page(page); 237 err = crypto_shash_digest(desc, data + offset, nbytes, 238 req->result); 239 kunmap_local(data); 240 return err; 241 } 242 EXPORT_SYMBOL_GPL(shash_ahash_digest); 243 244 static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm) 245 { 246 struct crypto_shash **ctx = crypto_tfm_ctx(tfm); 247 248 crypto_free_shash(*ctx); 249 } 250 251 static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm) 252 { 253 struct crypto_alg *calg = tfm->__crt_alg; 254 struct crypto_ahash *crt = __crypto_ahash_cast(tfm); 255 struct crypto_shash **ctx = crypto_tfm_ctx(tfm); 256 struct crypto_shash *shash; 257 258 if (!crypto_mod_get(calg)) 259 return -EAGAIN; 260 261 shash = crypto_create_tfm(calg, &crypto_shash_type); 262 if (IS_ERR(shash)) { 263 crypto_mod_put(calg); 264 return PTR_ERR(shash); 265 } 266 267 crt->using_shash = true; 268 *ctx = shash; 269 tfm->exit = crypto_exit_ahash_using_shash; 270 271 crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) & 272 CRYPTO_TFM_NEED_KEY); 273 274 return 0; 275 } 276 277 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, 278 unsigned int keylen) 279 { 280 return -ENOSYS; 281 } 282 283 static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg) 284 { 285 if (alg->setkey != ahash_nosetkey && 286 !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY)) 287 crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY); 288 } 289 290 int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, 291 unsigned int keylen) 292 { 293 if (likely(tfm->using_shash)) { 294 struct crypto_shash *shash = ahash_to_shash(tfm); 295 int err; 296 297 err = crypto_shash_setkey(shash, key, keylen); 298 if (unlikely(err)) { 299 crypto_ahash_set_flags(tfm, 300 crypto_shash_get_flags(shash) & 301 CRYPTO_TFM_NEED_KEY); 302 return err; 303 } 304 } else { 305 struct ahash_alg *alg = crypto_ahash_alg(tfm); 306 int err; 307 308 err = alg->setkey(tfm, key, keylen); 309 if (!err && crypto_ahash_need_fallback(tfm)) 310 err = crypto_ahash_setkey(crypto_ahash_fb(tfm), 311 key, keylen); 312 if (unlikely(err)) { 313 ahash_set_needkey(tfm, alg); 314 return err; 315 } 316 } 317 crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY); 318 return 0; 319 } 320 EXPORT_SYMBOL_GPL(crypto_ahash_setkey); 321 322 static int ahash_do_req_chain(struct ahash_request *req, 323 int (*const *op)(struct ahash_request *req)) 324 { 325 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 326 int err; 327 328 if (crypto_ahash_req_virt(tfm) || !ahash_request_isvirt(req)) 329 return (*op)(req); 330 331 if (crypto_ahash_statesize(tfm) > HASH_MAX_STATESIZE) 332 return -ENOSYS; 333 334 if (!crypto_ahash_need_fallback(tfm)) 335 return -ENOSYS; 336 337 if (crypto_hash_no_export_core(tfm)) 338 return -ENOSYS; 339 340 { 341 u8 state[HASH_MAX_STATESIZE]; 342 343 if (op == &crypto_ahash_alg(tfm)->digest) { 344 ahash_request_set_tfm(req, crypto_ahash_fb(tfm)); 345 err = crypto_ahash_digest(req); 346 goto out_no_state; 347 } 348 349 err = crypto_ahash_export(req, state); 350 ahash_request_set_tfm(req, crypto_ahash_fb(tfm)); 351 err = err ?: crypto_ahash_import(req, state); 352 353 if (op == &crypto_ahash_alg(tfm)->finup) { 354 err = err ?: crypto_ahash_finup(req); 355 goto out_no_state; 356 } 357 358 err = err ?: 359 crypto_ahash_update(req) ?: 360 crypto_ahash_export(req, state); 361 362 ahash_request_set_tfm(req, tfm); 363 return err ?: crypto_ahash_import(req, state); 364 365 out_no_state: 366 ahash_request_set_tfm(req, tfm); 367 return err; 368 } 369 } 370 371 int crypto_ahash_init(struct ahash_request *req) 372 { 373 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 374 375 if (likely(tfm->using_shash)) 376 return crypto_shash_init(prepare_shash_desc(req, tfm)); 377 if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY) 378 return -ENOKEY; 379 if (ahash_req_on_stack(req) && ahash_is_async(tfm)) 380 return -EAGAIN; 381 if (crypto_ahash_block_only(tfm)) { 382 u8 *buf = ahash_request_ctx(req); 383 384 buf += crypto_ahash_reqsize(tfm) - 1; 385 *buf = 0; 386 } 387 return crypto_ahash_alg(tfm)->init(req); 388 } 389 EXPORT_SYMBOL_GPL(crypto_ahash_init); 390 391 static void ahash_save_req(struct ahash_request *req, crypto_completion_t cplt) 392 { 393 req->saved_complete = req->base.complete; 394 req->saved_data = req->base.data; 395 req->base.complete = cplt; 396 req->base.data = req; 397 } 398 399 static void ahash_restore_req(struct ahash_request *req) 400 { 401 req->base.complete = req->saved_complete; 402 req->base.data = req->saved_data; 403 } 404 405 static int ahash_update_finish(struct ahash_request *req, int err) 406 { 407 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 408 bool nonzero = crypto_ahash_final_nonzero(tfm); 409 int bs = crypto_ahash_blocksize(tfm); 410 u8 *blenp = ahash_request_ctx(req); 411 int blen; 412 u8 *buf; 413 414 blenp += crypto_ahash_reqsize(tfm) - 1; 415 blen = *blenp; 416 buf = blenp - bs; 417 418 if (blen) { 419 req->src = req->sg_head + 1; 420 if (sg_is_chain(req->src)) 421 req->src = sg_chain_ptr(req->src); 422 } 423 424 req->nbytes += nonzero - blen; 425 426 blen = err < 0 ? 0 : err + nonzero; 427 if (ahash_request_isvirt(req)) 428 memcpy(buf, req->svirt + req->nbytes - blen, blen); 429 else 430 memcpy_from_sglist(buf, req->src, req->nbytes - blen, blen); 431 *blenp = blen; 432 433 ahash_restore_req(req); 434 435 return err; 436 } 437 438 static void ahash_update_done(void *data, int err) 439 { 440 ahash_op_done(data, err, ahash_update_finish); 441 } 442 443 int crypto_ahash_update(struct ahash_request *req) 444 { 445 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 446 bool nonzero = crypto_ahash_final_nonzero(tfm); 447 int bs = crypto_ahash_blocksize(tfm); 448 u8 *blenp = ahash_request_ctx(req); 449 int blen, err; 450 u8 *buf; 451 452 if (likely(tfm->using_shash)) 453 return shash_ahash_update(req, ahash_request_ctx(req)); 454 if (ahash_req_on_stack(req) && ahash_is_async(tfm)) 455 return -EAGAIN; 456 if (!crypto_ahash_block_only(tfm)) 457 return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update); 458 459 blenp += crypto_ahash_reqsize(tfm) - 1; 460 blen = *blenp; 461 buf = blenp - bs; 462 463 if (blen + req->nbytes < bs + nonzero) { 464 if (ahash_request_isvirt(req)) 465 memcpy(buf + blen, req->svirt, req->nbytes); 466 else 467 memcpy_from_sglist(buf + blen, req->src, 0, 468 req->nbytes); 469 470 *blenp += req->nbytes; 471 return 0; 472 } 473 474 if (blen) { 475 memset(req->sg_head, 0, sizeof(req->sg_head[0])); 476 sg_set_buf(req->sg_head, buf, blen); 477 if (req->src != req->sg_head + 1) 478 sg_chain(req->sg_head, 2, req->src); 479 req->src = req->sg_head; 480 req->nbytes += blen; 481 } 482 req->nbytes -= nonzero; 483 484 ahash_save_req(req, ahash_update_done); 485 486 err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update); 487 if (err == -EINPROGRESS || err == -EBUSY) 488 return err; 489 490 return ahash_update_finish(req, err); 491 } 492 EXPORT_SYMBOL_GPL(crypto_ahash_update); 493 494 static int ahash_finup_finish(struct ahash_request *req, int err) 495 { 496 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 497 u8 *blenp = ahash_request_ctx(req); 498 int blen; 499 500 blenp += crypto_ahash_reqsize(tfm) - 1; 501 blen = *blenp; 502 503 if (blen) { 504 if (sg_is_last(req->src)) 505 req->src = NULL; 506 else { 507 req->src = req->sg_head + 1; 508 if (sg_is_chain(req->src)) 509 req->src = sg_chain_ptr(req->src); 510 } 511 req->nbytes -= blen; 512 } 513 514 ahash_restore_req(req); 515 516 return err; 517 } 518 519 static void ahash_finup_done(void *data, int err) 520 { 521 ahash_op_done(data, err, ahash_finup_finish); 522 } 523 524 int crypto_ahash_finup(struct ahash_request *req) 525 { 526 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 527 int bs = crypto_ahash_blocksize(tfm); 528 u8 *blenp = ahash_request_ctx(req); 529 int blen, err; 530 u8 *buf; 531 532 if (likely(tfm->using_shash)) 533 return shash_ahash_finup(req, ahash_request_ctx(req)); 534 if (ahash_req_on_stack(req) && ahash_is_async(tfm)) 535 return -EAGAIN; 536 if (!crypto_ahash_alg(tfm)->finup) 537 return ahash_def_finup(req); 538 if (!crypto_ahash_block_only(tfm)) 539 return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup); 540 541 blenp += crypto_ahash_reqsize(tfm) - 1; 542 blen = *blenp; 543 buf = blenp - bs; 544 545 if (blen) { 546 memset(req->sg_head, 0, sizeof(req->sg_head[0])); 547 sg_set_buf(req->sg_head, buf, blen); 548 if (!req->src) 549 sg_mark_end(req->sg_head); 550 else if (req->src != req->sg_head + 1) 551 sg_chain(req->sg_head, 2, req->src); 552 req->src = req->sg_head; 553 req->nbytes += blen; 554 } 555 556 ahash_save_req(req, ahash_finup_done); 557 558 err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup); 559 if (err == -EINPROGRESS || err == -EBUSY) 560 return err; 561 562 return ahash_finup_finish(req, err); 563 } 564 EXPORT_SYMBOL_GPL(crypto_ahash_finup); 565 566 int crypto_ahash_digest(struct ahash_request *req) 567 { 568 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 569 570 if (likely(tfm->using_shash)) 571 return shash_ahash_digest(req, prepare_shash_desc(req, tfm)); 572 if (ahash_req_on_stack(req) && ahash_is_async(tfm)) 573 return -EAGAIN; 574 if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY) 575 return -ENOKEY; 576 return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->digest); 577 } 578 EXPORT_SYMBOL_GPL(crypto_ahash_digest); 579 580 static void ahash_def_finup_done2(void *data, int err) 581 { 582 struct ahash_request *areq = data; 583 584 if (err == -EINPROGRESS) 585 return; 586 587 ahash_restore_req(areq); 588 ahash_request_complete(areq, err); 589 } 590 591 static int ahash_def_finup_finish1(struct ahash_request *req, int err) 592 { 593 if (err) 594 goto out; 595 596 req->base.complete = ahash_def_finup_done2; 597 598 err = crypto_ahash_final(req); 599 if (err == -EINPROGRESS || err == -EBUSY) 600 return err; 601 602 out: 603 ahash_restore_req(req); 604 return err; 605 } 606 607 static void ahash_def_finup_done1(void *data, int err) 608 { 609 ahash_op_done(data, err, ahash_def_finup_finish1); 610 } 611 612 static int ahash_def_finup(struct ahash_request *req) 613 { 614 int err; 615 616 ahash_save_req(req, ahash_def_finup_done1); 617 618 err = crypto_ahash_update(req); 619 if (err == -EINPROGRESS || err == -EBUSY) 620 return err; 621 622 return ahash_def_finup_finish1(req, err); 623 } 624 625 int crypto_ahash_export_core(struct ahash_request *req, void *out) 626 { 627 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 628 629 if (likely(tfm->using_shash)) 630 return crypto_shash_export_core(ahash_request_ctx(req), out); 631 return crypto_ahash_alg(tfm)->export_core(req, out); 632 } 633 EXPORT_SYMBOL_GPL(crypto_ahash_export_core); 634 635 int crypto_ahash_export(struct ahash_request *req, void *out) 636 { 637 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 638 639 if (likely(tfm->using_shash)) 640 return crypto_shash_export(ahash_request_ctx(req), out); 641 if (crypto_ahash_block_only(tfm)) { 642 unsigned int plen = crypto_ahash_blocksize(tfm) + 1; 643 unsigned int reqsize = crypto_ahash_reqsize(tfm); 644 unsigned int ss = crypto_ahash_statesize(tfm); 645 u8 *buf = ahash_request_ctx(req); 646 647 memcpy(out + ss - plen, buf + reqsize - plen, plen); 648 } 649 return crypto_ahash_alg(tfm)->export(req, out); 650 } 651 EXPORT_SYMBOL_GPL(crypto_ahash_export); 652 653 int crypto_ahash_import_core(struct ahash_request *req, const void *in) 654 { 655 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 656 657 if (likely(tfm->using_shash)) 658 return crypto_shash_import_core(prepare_shash_desc(req, tfm), 659 in); 660 if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY) 661 return -ENOKEY; 662 return crypto_ahash_alg(tfm)->import_core(req, in); 663 } 664 EXPORT_SYMBOL_GPL(crypto_ahash_import_core); 665 666 int crypto_ahash_import(struct ahash_request *req, const void *in) 667 { 668 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 669 670 if (likely(tfm->using_shash)) 671 return crypto_shash_import(prepare_shash_desc(req, tfm), in); 672 if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY) 673 return -ENOKEY; 674 if (crypto_ahash_block_only(tfm)) { 675 unsigned int reqsize = crypto_ahash_reqsize(tfm); 676 u8 *buf = ahash_request_ctx(req); 677 678 buf[reqsize - 1] = 0; 679 } 680 return crypto_ahash_alg(tfm)->import(req, in); 681 } 682 EXPORT_SYMBOL_GPL(crypto_ahash_import); 683 684 static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm) 685 { 686 struct crypto_ahash *hash = __crypto_ahash_cast(tfm); 687 struct ahash_alg *alg = crypto_ahash_alg(hash); 688 689 if (alg->exit_tfm) 690 alg->exit_tfm(hash); 691 else if (tfm->__crt_alg->cra_exit) 692 tfm->__crt_alg->cra_exit(tfm); 693 694 if (crypto_ahash_need_fallback(hash)) 695 crypto_free_ahash(crypto_ahash_fb(hash)); 696 } 697 698 static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) 699 { 700 struct crypto_ahash *hash = __crypto_ahash_cast(tfm); 701 struct ahash_alg *alg = crypto_ahash_alg(hash); 702 struct crypto_ahash *fb = NULL; 703 int err; 704 705 crypto_ahash_set_statesize(hash, alg->halg.statesize); 706 crypto_ahash_set_reqsize(hash, crypto_tfm_alg_reqsize(tfm)); 707 708 if (tfm->__crt_alg->cra_type == &crypto_shash_type) 709 return crypto_init_ahash_using_shash(tfm); 710 711 if (crypto_ahash_need_fallback(hash)) { 712 fb = crypto_alloc_ahash(crypto_ahash_alg_name(hash), 713 CRYPTO_ALG_REQ_VIRT, 714 CRYPTO_ALG_ASYNC | 715 CRYPTO_ALG_REQ_VIRT | 716 CRYPTO_AHASH_ALG_NO_EXPORT_CORE); 717 if (IS_ERR(fb)) 718 return PTR_ERR(fb); 719 720 tfm->fb = crypto_ahash_tfm(fb); 721 } 722 723 ahash_set_needkey(hash, alg); 724 725 tfm->exit = crypto_ahash_exit_tfm; 726 727 if (alg->init_tfm) 728 err = alg->init_tfm(hash); 729 else if (tfm->__crt_alg->cra_init) 730 err = tfm->__crt_alg->cra_init(tfm); 731 else 732 return 0; 733 734 if (err) 735 goto out_free_sync_hash; 736 737 if (!ahash_is_async(hash) && crypto_ahash_reqsize(hash) > 738 MAX_SYNC_HASH_REQSIZE) 739 goto out_exit_tfm; 740 741 BUILD_BUG_ON(HASH_MAX_DESCSIZE > MAX_SYNC_HASH_REQSIZE); 742 if (crypto_ahash_reqsize(hash) < HASH_MAX_DESCSIZE) 743 crypto_ahash_set_reqsize(hash, HASH_MAX_DESCSIZE); 744 745 return 0; 746 747 out_exit_tfm: 748 if (alg->exit_tfm) 749 alg->exit_tfm(hash); 750 else if (tfm->__crt_alg->cra_exit) 751 tfm->__crt_alg->cra_exit(tfm); 752 err = -EINVAL; 753 out_free_sync_hash: 754 crypto_free_ahash(fb); 755 return err; 756 } 757 758 static unsigned int crypto_ahash_extsize(struct crypto_alg *alg) 759 { 760 if (alg->cra_type == &crypto_shash_type) 761 return sizeof(struct crypto_shash *); 762 763 return crypto_alg_extsize(alg); 764 } 765 766 static void crypto_ahash_free_instance(struct crypto_instance *inst) 767 { 768 struct ahash_instance *ahash = ahash_instance(inst); 769 770 ahash->free(ahash); 771 } 772 773 static int __maybe_unused crypto_ahash_report( 774 struct sk_buff *skb, struct crypto_alg *alg) 775 { 776 struct crypto_report_hash rhash; 777 778 memset(&rhash, 0, sizeof(rhash)); 779 780 strscpy(rhash.type, "ahash", sizeof(rhash.type)); 781 782 rhash.blocksize = alg->cra_blocksize; 783 rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize; 784 785 return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash); 786 } 787 788 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) 789 __maybe_unused; 790 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) 791 { 792 seq_printf(m, "type : ahash\n"); 793 seq_printf(m, "async : %s\n", 794 str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC)); 795 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); 796 seq_printf(m, "digestsize : %u\n", 797 __crypto_hash_alg_common(alg)->digestsize); 798 } 799 800 static const struct crypto_type crypto_ahash_type = { 801 .extsize = crypto_ahash_extsize, 802 .init_tfm = crypto_ahash_init_tfm, 803 .free = crypto_ahash_free_instance, 804 #ifdef CONFIG_PROC_FS 805 .show = crypto_ahash_show, 806 #endif 807 #if IS_ENABLED(CONFIG_CRYPTO_USER) 808 .report = crypto_ahash_report, 809 #endif 810 .maskclear = ~CRYPTO_ALG_TYPE_MASK, 811 .maskset = CRYPTO_ALG_TYPE_AHASH_MASK, 812 .type = CRYPTO_ALG_TYPE_AHASH, 813 .tfmsize = offsetof(struct crypto_ahash, base), 814 .algsize = offsetof(struct ahash_alg, halg.base), 815 }; 816 817 int crypto_grab_ahash(struct crypto_ahash_spawn *spawn, 818 struct crypto_instance *inst, 819 const char *name, u32 type, u32 mask) 820 { 821 spawn->base.frontend = &crypto_ahash_type; 822 return crypto_grab_spawn(&spawn->base, inst, name, type, mask); 823 } 824 EXPORT_SYMBOL_GPL(crypto_grab_ahash); 825 826 struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type, 827 u32 mask) 828 { 829 return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask); 830 } 831 EXPORT_SYMBOL_GPL(crypto_alloc_ahash); 832 833 int crypto_has_ahash(const char *alg_name, u32 type, u32 mask) 834 { 835 return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask); 836 } 837 EXPORT_SYMBOL_GPL(crypto_has_ahash); 838 839 bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg) 840 { 841 struct crypto_alg *alg = &halg->base; 842 843 if (alg->cra_type == &crypto_shash_type) 844 return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg)); 845 846 return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey; 847 } 848 EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey); 849 850 struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash) 851 { 852 struct hash_alg_common *halg = crypto_hash_alg_common(hash); 853 struct crypto_tfm *tfm = crypto_ahash_tfm(hash); 854 struct crypto_ahash *fb = NULL; 855 struct crypto_ahash *nhash; 856 struct ahash_alg *alg; 857 int err; 858 859 if (!crypto_hash_alg_has_setkey(halg)) { 860 tfm = crypto_tfm_get(tfm); 861 if (IS_ERR(tfm)) 862 return ERR_CAST(tfm); 863 864 return hash; 865 } 866 867 nhash = crypto_clone_tfm(&crypto_ahash_type, tfm); 868 869 if (IS_ERR(nhash)) 870 return nhash; 871 872 nhash->reqsize = hash->reqsize; 873 nhash->statesize = hash->statesize; 874 875 if (likely(hash->using_shash)) { 876 struct crypto_shash **nctx = crypto_ahash_ctx(nhash); 877 struct crypto_shash *shash; 878 879 shash = crypto_clone_shash(ahash_to_shash(hash)); 880 if (IS_ERR(shash)) { 881 err = PTR_ERR(shash); 882 goto out_free_nhash; 883 } 884 crypto_ahash_tfm(nhash)->exit = crypto_exit_ahash_using_shash; 885 nhash->using_shash = true; 886 *nctx = shash; 887 return nhash; 888 } 889 890 if (crypto_ahash_need_fallback(hash)) { 891 fb = crypto_clone_ahash(crypto_ahash_fb(hash)); 892 err = PTR_ERR(fb); 893 if (IS_ERR(fb)) 894 goto out_free_nhash; 895 896 crypto_ahash_tfm(nhash)->fb = crypto_ahash_tfm(fb); 897 } 898 899 err = -ENOSYS; 900 alg = crypto_ahash_alg(hash); 901 if (!alg->clone_tfm) 902 goto out_free_fb; 903 904 err = alg->clone_tfm(nhash, hash); 905 if (err) 906 goto out_free_fb; 907 908 crypto_ahash_tfm(nhash)->exit = crypto_ahash_exit_tfm; 909 910 return nhash; 911 912 out_free_fb: 913 crypto_free_ahash(fb); 914 out_free_nhash: 915 crypto_free_ahash(nhash); 916 return ERR_PTR(err); 917 } 918 EXPORT_SYMBOL_GPL(crypto_clone_ahash); 919 920 static int ahash_default_export_core(struct ahash_request *req, void *out) 921 { 922 return -ENOSYS; 923 } 924 925 static int ahash_default_import_core(struct ahash_request *req, const void *in) 926 { 927 return -ENOSYS; 928 } 929 930 static int ahash_prepare_alg(struct ahash_alg *alg) 931 { 932 struct crypto_alg *base = &alg->halg.base; 933 int err; 934 935 if (alg->halg.statesize == 0) 936 return -EINVAL; 937 938 if (base->cra_reqsize && base->cra_reqsize < alg->halg.statesize) 939 return -EINVAL; 940 941 if (!(base->cra_flags & CRYPTO_ALG_ASYNC) && 942 base->cra_reqsize > MAX_SYNC_HASH_REQSIZE) 943 return -EINVAL; 944 945 if (base->cra_flags & CRYPTO_ALG_NEED_FALLBACK && 946 base->cra_flags & CRYPTO_ALG_NO_FALLBACK) 947 return -EINVAL; 948 949 err = hash_prepare_alg(&alg->halg); 950 if (err) 951 return err; 952 953 base->cra_type = &crypto_ahash_type; 954 base->cra_flags |= CRYPTO_ALG_TYPE_AHASH; 955 956 if ((base->cra_flags ^ CRYPTO_ALG_REQ_VIRT) & 957 (CRYPTO_ALG_ASYNC | CRYPTO_ALG_REQ_VIRT) && 958 !(base->cra_flags & CRYPTO_ALG_NO_FALLBACK)) 959 base->cra_flags |= CRYPTO_ALG_NEED_FALLBACK; 960 961 if (!alg->setkey) 962 alg->setkey = ahash_nosetkey; 963 964 if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) { 965 BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256); 966 if (!alg->finup) 967 return -EINVAL; 968 969 base->cra_reqsize += base->cra_blocksize + 1; 970 alg->halg.statesize += base->cra_blocksize + 1; 971 alg->export_core = alg->export; 972 alg->import_core = alg->import; 973 } else if (!alg->export_core || !alg->import_core) { 974 alg->export_core = ahash_default_export_core; 975 alg->import_core = ahash_default_import_core; 976 base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE; 977 } 978 979 return 0; 980 } 981 982 int crypto_register_ahash(struct ahash_alg *alg) 983 { 984 struct crypto_alg *base = &alg->halg.base; 985 int err; 986 987 err = ahash_prepare_alg(alg); 988 if (err) 989 return err; 990 991 return crypto_register_alg(base); 992 } 993 EXPORT_SYMBOL_GPL(crypto_register_ahash); 994 995 void crypto_unregister_ahash(struct ahash_alg *alg) 996 { 997 crypto_unregister_alg(&alg->halg.base); 998 } 999 EXPORT_SYMBOL_GPL(crypto_unregister_ahash); 1000 1001 int crypto_register_ahashes(struct ahash_alg *algs, int count) 1002 { 1003 int i, ret; 1004 1005 for (i = 0; i < count; i++) { 1006 ret = crypto_register_ahash(&algs[i]); 1007 if (ret) 1008 goto err; 1009 } 1010 1011 return 0; 1012 1013 err: 1014 for (--i; i >= 0; --i) 1015 crypto_unregister_ahash(&algs[i]); 1016 1017 return ret; 1018 } 1019 EXPORT_SYMBOL_GPL(crypto_register_ahashes); 1020 1021 void crypto_unregister_ahashes(struct ahash_alg *algs, int count) 1022 { 1023 int i; 1024 1025 for (i = count - 1; i >= 0; --i) 1026 crypto_unregister_ahash(&algs[i]); 1027 } 1028 EXPORT_SYMBOL_GPL(crypto_unregister_ahashes); 1029 1030 int ahash_register_instance(struct crypto_template *tmpl, 1031 struct ahash_instance *inst) 1032 { 1033 int err; 1034 1035 if (WARN_ON(!inst->free)) 1036 return -EINVAL; 1037 1038 err = ahash_prepare_alg(&inst->alg); 1039 if (err) 1040 return err; 1041 1042 return crypto_register_instance(tmpl, ahash_crypto_instance(inst)); 1043 } 1044 EXPORT_SYMBOL_GPL(ahash_register_instance); 1045 1046 void ahash_request_free(struct ahash_request *req) 1047 { 1048 if (unlikely(!req)) 1049 return; 1050 1051 if (!ahash_req_on_stack(req)) { 1052 kfree(req); 1053 return; 1054 } 1055 1056 ahash_request_zero(req); 1057 } 1058 EXPORT_SYMBOL_GPL(ahash_request_free); 1059 1060 int crypto_hash_digest(struct crypto_ahash *tfm, const u8 *data, 1061 unsigned int len, u8 *out) 1062 { 1063 HASH_REQUEST_ON_STACK(req, crypto_ahash_fb(tfm)); 1064 int err; 1065 1066 ahash_request_set_callback(req, 0, NULL, NULL); 1067 ahash_request_set_virt(req, data, out, len); 1068 err = crypto_ahash_digest(req); 1069 1070 ahash_request_zero(req); 1071 1072 return err; 1073 } 1074 EXPORT_SYMBOL_GPL(crypto_hash_digest); 1075 1076 void ahash_free_singlespawn_instance(struct ahash_instance *inst) 1077 { 1078 crypto_drop_spawn(ahash_instance_ctx(inst)); 1079 kfree(inst); 1080 } 1081 EXPORT_SYMBOL_GPL(ahash_free_singlespawn_instance); 1082 1083 MODULE_LICENSE("GPL"); 1084 MODULE_DESCRIPTION("Asynchronous cryptographic hash type"); 1085