1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Cryptographic API for algorithms (i.e., low-level API). 4 * 5 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au> 6 */ 7 8 #include <crypto/algapi.h> 9 #include <linux/err.h> 10 #include <linux/errno.h> 11 #include <linux/fips.h> 12 #include <linux/init.h> 13 #include <linux/kernel.h> 14 #include <linux/list.h> 15 #include <linux/module.h> 16 #include <linux/rtnetlink.h> 17 #include <linux/slab.h> 18 #include <linux/string.h> 19 #include <linux/workqueue.h> 20 21 #include "internal.h" 22 23 static LIST_HEAD(crypto_template_list); 24 25 static inline void crypto_check_module_sig(struct module *mod) 26 { 27 if (fips_enabled && mod && !module_sig_ok(mod)) 28 panic("Module %s signature verification failed in FIPS mode\n", 29 module_name(mod)); 30 } 31 32 static int crypto_check_alg(struct crypto_alg *alg) 33 { 34 crypto_check_module_sig(alg->cra_module); 35 36 if (!alg->cra_name[0] || !alg->cra_driver_name[0]) 37 return -EINVAL; 38 39 if (alg->cra_alignmask & (alg->cra_alignmask + 1)) 40 return -EINVAL; 41 42 /* General maximums for all algs. */ 43 if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK) 44 return -EINVAL; 45 46 if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE) 47 return -EINVAL; 48 49 /* Lower maximums for specific alg types. */ 50 if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == 51 CRYPTO_ALG_TYPE_CIPHER) { 52 if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK) 53 return -EINVAL; 54 55 if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE) 56 return -EINVAL; 57 } 58 59 if (alg->cra_priority < 0) 60 return -EINVAL; 61 62 refcount_set(&alg->cra_refcnt, 1); 63 64 return 0; 65 } 66 67 static void crypto_free_instance(struct crypto_instance *inst) 68 { 69 inst->alg.cra_type->free(inst); 70 } 71 72 static void crypto_destroy_instance_workfn(struct work_struct *w) 73 { 74 struct crypto_template *tmpl = container_of(w, struct crypto_template, 75 free_work); 76 struct crypto_instance *inst; 77 struct hlist_node *n; 78 HLIST_HEAD(list); 79 80 down_write(&crypto_alg_sem); 81 hlist_for_each_entry_safe(inst, n, &tmpl->dead, list) { 82 if (refcount_read(&inst->alg.cra_refcnt) != -1) 83 continue; 84 hlist_del(&inst->list); 85 hlist_add_head(&inst->list, &list); 86 } 87 up_write(&crypto_alg_sem); 88 89 hlist_for_each_entry_safe(inst, n, &list, list) 90 crypto_free_instance(inst); 91 } 92 93 static void crypto_destroy_instance(struct crypto_alg *alg) 94 { 95 struct crypto_instance *inst = container_of(alg, 96 struct crypto_instance, 97 alg); 98 struct crypto_template *tmpl = inst->tmpl; 99 100 refcount_set(&alg->cra_refcnt, -1); 101 schedule_work(&tmpl->free_work); 102 } 103 104 /* 105 * This function adds a spawn to the list secondary_spawns which 106 * will be used at the end of crypto_remove_spawns to unregister 107 * instances, unless the spawn happens to be one that is depended 108 * on by the new algorithm (nalg in crypto_remove_spawns). 109 * 110 * This function is also responsible for resurrecting any algorithms 111 * in the dependency chain of nalg by unsetting n->dead. 112 */ 113 static struct list_head *crypto_more_spawns(struct crypto_alg *alg, 114 struct list_head *stack, 115 struct list_head *top, 116 struct list_head *secondary_spawns) 117 { 118 struct crypto_spawn *spawn, *n; 119 120 spawn = list_first_entry_or_null(stack, struct crypto_spawn, list); 121 if (!spawn) 122 return NULL; 123 124 n = list_prev_entry(spawn, list); 125 list_move(&spawn->list, secondary_spawns); 126 127 if (list_is_last(&n->list, stack)) 128 return top; 129 130 n = list_next_entry(n, list); 131 if (!spawn->dead) 132 n->dead = false; 133 134 return &n->inst->alg.cra_users; 135 } 136 137 static void crypto_remove_instance(struct crypto_instance *inst, 138 struct list_head *list) 139 { 140 struct crypto_template *tmpl = inst->tmpl; 141 142 if (crypto_is_dead(&inst->alg)) 143 return; 144 145 inst->alg.cra_flags |= CRYPTO_ALG_DEAD; 146 147 if (!tmpl) 148 return; 149 150 list_del_init(&inst->alg.cra_list); 151 hlist_del(&inst->list); 152 hlist_add_head(&inst->list, &tmpl->dead); 153 154 BUG_ON(!list_empty(&inst->alg.cra_users)); 155 156 crypto_alg_put(&inst->alg); 157 } 158 159 /* 160 * Given an algorithm alg, remove all algorithms that depend on it 161 * through spawns. If nalg is not null, then exempt any algorithms 162 * that is depended on by nalg. This is useful when nalg itself 163 * depends on alg. 164 */ 165 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list, 166 struct crypto_alg *nalg) 167 { 168 u32 new_type = (nalg ?: alg)->cra_flags; 169 struct crypto_spawn *spawn, *n; 170 LIST_HEAD(secondary_spawns); 171 struct list_head *spawns; 172 LIST_HEAD(stack); 173 LIST_HEAD(top); 174 175 spawns = &alg->cra_users; 176 list_for_each_entry_safe(spawn, n, spawns, list) { 177 if ((spawn->alg->cra_flags ^ new_type) & spawn->mask) 178 continue; 179 180 list_move(&spawn->list, &top); 181 } 182 183 /* 184 * Perform a depth-first walk starting from alg through 185 * the cra_users tree. The list stack records the path 186 * from alg to the current spawn. 187 */ 188 spawns = ⊤ 189 do { 190 while (!list_empty(spawns)) { 191 struct crypto_instance *inst; 192 193 spawn = list_first_entry(spawns, struct crypto_spawn, 194 list); 195 inst = spawn->inst; 196 197 list_move(&spawn->list, &stack); 198 spawn->dead = !spawn->registered || &inst->alg != nalg; 199 200 if (!spawn->registered) 201 break; 202 203 BUG_ON(&inst->alg == alg); 204 205 if (&inst->alg == nalg) 206 break; 207 208 spawns = &inst->alg.cra_users; 209 210 /* 211 * Even if spawn->registered is true, the 212 * instance itself may still be unregistered. 213 * This is because it may have failed during 214 * registration. Therefore we still need to 215 * make the following test. 216 * 217 * We may encounter an unregistered instance here, since 218 * an instance's spawns are set up prior to the instance 219 * being registered. An unregistered instance will have 220 * NULL ->cra_users.next, since ->cra_users isn't 221 * properly initialized until registration. But an 222 * unregistered instance cannot have any users, so treat 223 * it the same as ->cra_users being empty. 224 */ 225 if (spawns->next == NULL) 226 break; 227 } 228 } while ((spawns = crypto_more_spawns(alg, &stack, &top, 229 &secondary_spawns))); 230 231 /* 232 * Remove all instances that are marked as dead. Also 233 * complete the resurrection of the others by moving them 234 * back to the cra_users list. 235 */ 236 list_for_each_entry_safe(spawn, n, &secondary_spawns, list) { 237 if (!spawn->dead) 238 list_move(&spawn->list, &spawn->alg->cra_users); 239 else if (spawn->registered) 240 crypto_remove_instance(spawn->inst, list); 241 } 242 } 243 EXPORT_SYMBOL_GPL(crypto_remove_spawns); 244 245 static void crypto_alg_finish_registration(struct crypto_alg *alg, 246 struct list_head *algs_to_put) 247 __must_hold(&crypto_alg_sem) 248 { 249 struct crypto_alg *q; 250 251 list_for_each_entry(q, &crypto_alg_list, cra_list) { 252 if (q == alg) 253 continue; 254 255 if (crypto_is_moribund(q)) 256 continue; 257 258 if (crypto_is_larval(q)) 259 continue; 260 261 if (strcmp(alg->cra_name, q->cra_name)) 262 continue; 263 264 if (strcmp(alg->cra_driver_name, q->cra_driver_name) && 265 q->cra_priority > alg->cra_priority) 266 continue; 267 268 crypto_remove_spawns(q, algs_to_put, alg); 269 } 270 271 crypto_notify(CRYPTO_MSG_ALG_LOADED, alg); 272 } 273 274 static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg) 275 { 276 struct crypto_larval *larval; 277 278 if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS) || 279 (alg->cra_flags & CRYPTO_ALG_INTERNAL)) 280 return NULL; /* No self-test needed */ 281 282 larval = crypto_larval_alloc(alg->cra_name, 283 alg->cra_flags | CRYPTO_ALG_TESTED, 0); 284 if (IS_ERR(larval)) 285 return larval; 286 287 larval->adult = crypto_mod_get(alg); 288 if (!larval->adult) { 289 kfree(larval); 290 return ERR_PTR(-ENOENT); 291 } 292 293 refcount_set(&larval->alg.cra_refcnt, 1); 294 memcpy(larval->alg.cra_driver_name, alg->cra_driver_name, 295 CRYPTO_MAX_ALG_NAME); 296 larval->alg.cra_priority = alg->cra_priority; 297 298 return larval; 299 } 300 301 static struct crypto_larval * 302 __crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put) 303 __must_hold(&crypto_alg_sem) 304 { 305 struct crypto_alg *q; 306 struct crypto_larval *larval; 307 int ret = -EAGAIN; 308 309 if (crypto_is_dead(alg)) 310 goto err; 311 312 INIT_LIST_HEAD(&alg->cra_users); 313 314 ret = -EEXIST; 315 316 list_for_each_entry(q, &crypto_alg_list, cra_list) { 317 if (q == alg) 318 goto err; 319 320 if (crypto_is_moribund(q)) 321 continue; 322 323 if (crypto_is_larval(q)) { 324 if (!strcmp(alg->cra_driver_name, q->cra_driver_name)) 325 goto err; 326 continue; 327 } 328 329 if (!strcmp(q->cra_driver_name, alg->cra_name) || 330 !strcmp(q->cra_driver_name, alg->cra_driver_name) || 331 !strcmp(q->cra_name, alg->cra_driver_name)) 332 goto err; 333 } 334 335 larval = crypto_alloc_test_larval(alg); 336 if (IS_ERR(larval)) 337 goto out; 338 339 list_add(&alg->cra_list, &crypto_alg_list); 340 341 if (larval) { 342 /* No cheating! */ 343 alg->cra_flags &= ~CRYPTO_ALG_TESTED; 344 345 list_add(&larval->alg.cra_list, &crypto_alg_list); 346 } else { 347 alg->cra_flags |= CRYPTO_ALG_TESTED; 348 crypto_alg_finish_registration(alg, algs_to_put); 349 } 350 351 out: 352 return larval; 353 354 err: 355 larval = ERR_PTR(ret); 356 goto out; 357 } 358 359 void crypto_alg_tested(const char *name, int err) 360 { 361 struct crypto_larval *test; 362 struct crypto_alg *alg; 363 struct crypto_alg *q; 364 LIST_HEAD(list); 365 366 down_write(&crypto_alg_sem); 367 list_for_each_entry(q, &crypto_alg_list, cra_list) { 368 if (crypto_is_moribund(q) || !crypto_is_larval(q)) 369 continue; 370 371 test = (struct crypto_larval *)q; 372 373 if (!strcmp(q->cra_driver_name, name)) 374 goto found; 375 } 376 377 pr_err("alg: Unexpected test result for %s: %d\n", name, err); 378 up_write(&crypto_alg_sem); 379 return; 380 381 found: 382 q->cra_flags |= CRYPTO_ALG_DEAD; 383 alg = test->adult; 384 385 if (crypto_is_dead(alg)) 386 goto complete; 387 388 if (err == -ECANCELED) 389 alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL; 390 else if (err) 391 goto complete; 392 else 393 alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL; 394 395 alg->cra_flags |= CRYPTO_ALG_TESTED; 396 397 crypto_alg_finish_registration(alg, &list); 398 399 complete: 400 list_del_init(&test->alg.cra_list); 401 complete_all(&test->completion); 402 403 up_write(&crypto_alg_sem); 404 405 crypto_alg_put(&test->alg); 406 crypto_remove_final(&list); 407 } 408 EXPORT_SYMBOL_GPL(crypto_alg_tested); 409 410 void crypto_remove_final(struct list_head *list) 411 { 412 struct crypto_alg *alg; 413 struct crypto_alg *n; 414 415 list_for_each_entry_safe(alg, n, list, cra_list) { 416 list_del_init(&alg->cra_list); 417 crypto_alg_put(alg); 418 } 419 } 420 EXPORT_SYMBOL_GPL(crypto_remove_final); 421 422 static void crypto_free_alg(struct crypto_alg *alg) 423 { 424 unsigned int algsize = alg->cra_type->algsize; 425 u8 *p = (u8 *)alg - algsize; 426 427 crypto_destroy_alg(alg); 428 kfree(p); 429 } 430 431 int crypto_register_alg(struct crypto_alg *alg) 432 { 433 struct crypto_larval *larval; 434 bool test_started = false; 435 LIST_HEAD(algs_to_put); 436 int err; 437 438 alg->cra_flags &= ~CRYPTO_ALG_DEAD; 439 err = crypto_check_alg(alg); 440 if (err) 441 return err; 442 443 if (alg->cra_flags & CRYPTO_ALG_DUP_FIRST && 444 !WARN_ON_ONCE(alg->cra_destroy)) { 445 unsigned int algsize = alg->cra_type->algsize; 446 u8 *p = (u8 *)alg - algsize; 447 448 p = kmemdup(p, algsize + sizeof(*alg), GFP_KERNEL); 449 if (!p) 450 return -ENOMEM; 451 452 alg = (void *)(p + algsize); 453 alg->cra_destroy = crypto_free_alg; 454 } 455 456 down_write(&crypto_alg_sem); 457 larval = __crypto_register_alg(alg, &algs_to_put); 458 if (!IS_ERR_OR_NULL(larval)) { 459 test_started = crypto_boot_test_finished(); 460 larval->test_started = test_started; 461 } 462 up_write(&crypto_alg_sem); 463 464 if (IS_ERR(larval)) { 465 crypto_alg_put(alg); 466 return PTR_ERR(larval); 467 } 468 469 if (test_started) 470 crypto_schedule_test(larval); 471 else 472 crypto_remove_final(&algs_to_put); 473 474 return 0; 475 } 476 EXPORT_SYMBOL_GPL(crypto_register_alg); 477 478 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list) 479 { 480 if (unlikely(list_empty(&alg->cra_list))) 481 return -ENOENT; 482 483 alg->cra_flags |= CRYPTO_ALG_DEAD; 484 485 list_del_init(&alg->cra_list); 486 crypto_remove_spawns(alg, list, NULL); 487 488 return 0; 489 } 490 491 void crypto_unregister_alg(struct crypto_alg *alg) 492 { 493 int ret; 494 LIST_HEAD(list); 495 496 down_write(&crypto_alg_sem); 497 ret = crypto_remove_alg(alg, &list); 498 up_write(&crypto_alg_sem); 499 500 if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name)) 501 return; 502 503 WARN_ON(!alg->cra_destroy && refcount_read(&alg->cra_refcnt) != 1); 504 505 list_add(&alg->cra_list, &list); 506 crypto_remove_final(&list); 507 } 508 EXPORT_SYMBOL_GPL(crypto_unregister_alg); 509 510 int crypto_register_algs(struct crypto_alg *algs, int count) 511 { 512 int i, ret; 513 514 for (i = 0; i < count; i++) { 515 ret = crypto_register_alg(&algs[i]); 516 if (ret) { 517 crypto_unregister_algs(algs, i); 518 return ret; 519 } 520 } 521 522 return 0; 523 } 524 EXPORT_SYMBOL_GPL(crypto_register_algs); 525 526 void crypto_unregister_algs(struct crypto_alg *algs, int count) 527 { 528 int i; 529 530 for (i = count - 1; i >= 0; --i) 531 crypto_unregister_alg(&algs[i]); 532 } 533 EXPORT_SYMBOL_GPL(crypto_unregister_algs); 534 535 int crypto_register_template(struct crypto_template *tmpl) 536 { 537 struct crypto_template *q; 538 int err = -EEXIST; 539 540 INIT_WORK(&tmpl->free_work, crypto_destroy_instance_workfn); 541 542 down_write(&crypto_alg_sem); 543 544 crypto_check_module_sig(tmpl->module); 545 546 list_for_each_entry(q, &crypto_template_list, list) { 547 if (q == tmpl) 548 goto out; 549 } 550 551 list_add(&tmpl->list, &crypto_template_list); 552 err = 0; 553 out: 554 up_write(&crypto_alg_sem); 555 return err; 556 } 557 EXPORT_SYMBOL_GPL(crypto_register_template); 558 559 int crypto_register_templates(struct crypto_template *tmpls, int count) 560 { 561 int i, err; 562 563 for (i = 0; i < count; i++) { 564 err = crypto_register_template(&tmpls[i]); 565 if (err) 566 goto out; 567 } 568 return 0; 569 570 out: 571 for (--i; i >= 0; --i) 572 crypto_unregister_template(&tmpls[i]); 573 return err; 574 } 575 EXPORT_SYMBOL_GPL(crypto_register_templates); 576 577 void crypto_unregister_template(struct crypto_template *tmpl) 578 { 579 struct crypto_instance *inst; 580 struct hlist_node *n; 581 struct hlist_head *list; 582 LIST_HEAD(users); 583 584 down_write(&crypto_alg_sem); 585 586 BUG_ON(list_empty(&tmpl->list)); 587 list_del_init(&tmpl->list); 588 589 list = &tmpl->instances; 590 hlist_for_each_entry(inst, list, list) { 591 int err = crypto_remove_alg(&inst->alg, &users); 592 593 BUG_ON(err); 594 } 595 596 up_write(&crypto_alg_sem); 597 598 hlist_for_each_entry_safe(inst, n, list, list) { 599 BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1); 600 crypto_free_instance(inst); 601 } 602 crypto_remove_final(&users); 603 604 flush_work(&tmpl->free_work); 605 } 606 EXPORT_SYMBOL_GPL(crypto_unregister_template); 607 608 void crypto_unregister_templates(struct crypto_template *tmpls, int count) 609 { 610 int i; 611 612 for (i = count - 1; i >= 0; --i) 613 crypto_unregister_template(&tmpls[i]); 614 } 615 EXPORT_SYMBOL_GPL(crypto_unregister_templates); 616 617 static struct crypto_template *__crypto_lookup_template(const char *name) 618 { 619 struct crypto_template *q, *tmpl = NULL; 620 621 down_read(&crypto_alg_sem); 622 list_for_each_entry(q, &crypto_template_list, list) { 623 if (strcmp(q->name, name)) 624 continue; 625 if (unlikely(!crypto_tmpl_get(q))) 626 continue; 627 628 tmpl = q; 629 break; 630 } 631 up_read(&crypto_alg_sem); 632 633 return tmpl; 634 } 635 636 struct crypto_template *crypto_lookup_template(const char *name) 637 { 638 return try_then_request_module(__crypto_lookup_template(name), 639 "crypto-%s", name); 640 } 641 EXPORT_SYMBOL_GPL(crypto_lookup_template); 642 643 int crypto_register_instance(struct crypto_template *tmpl, 644 struct crypto_instance *inst) 645 { 646 struct crypto_larval *larval; 647 struct crypto_spawn *spawn; 648 u32 fips_internal = 0; 649 LIST_HEAD(algs_to_put); 650 int err; 651 652 err = crypto_check_alg(&inst->alg); 653 if (err) 654 return err; 655 656 inst->alg.cra_module = tmpl->module; 657 inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE; 658 inst->alg.cra_destroy = crypto_destroy_instance; 659 660 down_write(&crypto_alg_sem); 661 662 larval = ERR_PTR(-EAGAIN); 663 for (spawn = inst->spawns; spawn;) { 664 struct crypto_spawn *next; 665 666 if (spawn->dead) 667 goto unlock; 668 669 next = spawn->next; 670 spawn->inst = inst; 671 spawn->registered = true; 672 673 fips_internal |= spawn->alg->cra_flags; 674 675 crypto_mod_put(spawn->alg); 676 677 spawn = next; 678 } 679 680 inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL); 681 682 larval = __crypto_register_alg(&inst->alg, &algs_to_put); 683 if (IS_ERR(larval)) 684 goto unlock; 685 else if (larval) 686 larval->test_started = true; 687 688 hlist_add_head(&inst->list, &tmpl->instances); 689 inst->tmpl = tmpl; 690 691 unlock: 692 up_write(&crypto_alg_sem); 693 694 if (IS_ERR(larval)) 695 return PTR_ERR(larval); 696 697 if (larval) 698 crypto_schedule_test(larval); 699 else 700 crypto_remove_final(&algs_to_put); 701 702 return 0; 703 } 704 EXPORT_SYMBOL_GPL(crypto_register_instance); 705 706 void crypto_unregister_instance(struct crypto_instance *inst) 707 { 708 LIST_HEAD(list); 709 710 down_write(&crypto_alg_sem); 711 712 crypto_remove_spawns(&inst->alg, &list, NULL); 713 crypto_remove_instance(inst, &list); 714 715 up_write(&crypto_alg_sem); 716 717 crypto_remove_final(&list); 718 } 719 EXPORT_SYMBOL_GPL(crypto_unregister_instance); 720 721 int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst, 722 const char *name, u32 type, u32 mask) 723 { 724 struct crypto_alg *alg; 725 int err = -EAGAIN; 726 727 if (WARN_ON_ONCE(inst == NULL)) 728 return -EINVAL; 729 730 /* Allow the result of crypto_attr_alg_name() to be passed directly */ 731 if (IS_ERR(name)) 732 return PTR_ERR(name); 733 734 alg = crypto_find_alg(name, spawn->frontend, 735 type | CRYPTO_ALG_FIPS_INTERNAL, mask); 736 if (IS_ERR(alg)) 737 return PTR_ERR(alg); 738 739 down_write(&crypto_alg_sem); 740 if (!crypto_is_moribund(alg)) { 741 list_add(&spawn->list, &alg->cra_users); 742 spawn->alg = alg; 743 spawn->mask = mask; 744 spawn->next = inst->spawns; 745 inst->spawns = spawn; 746 inst->alg.cra_flags |= 747 (alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS); 748 err = 0; 749 } 750 up_write(&crypto_alg_sem); 751 if (err) 752 crypto_mod_put(alg); 753 return err; 754 } 755 EXPORT_SYMBOL_GPL(crypto_grab_spawn); 756 757 void crypto_drop_spawn(struct crypto_spawn *spawn) 758 { 759 if (!spawn->alg) /* not yet initialized? */ 760 return; 761 762 down_write(&crypto_alg_sem); 763 if (!spawn->dead) 764 list_del(&spawn->list); 765 up_write(&crypto_alg_sem); 766 767 if (!spawn->registered) 768 crypto_mod_put(spawn->alg); 769 } 770 EXPORT_SYMBOL_GPL(crypto_drop_spawn); 771 772 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn) 773 { 774 struct crypto_alg *alg = ERR_PTR(-EAGAIN); 775 struct crypto_alg *target; 776 bool shoot = false; 777 778 down_read(&crypto_alg_sem); 779 if (!spawn->dead) { 780 alg = spawn->alg; 781 if (!crypto_mod_get(alg)) { 782 target = crypto_alg_get(alg); 783 shoot = true; 784 alg = ERR_PTR(-EAGAIN); 785 } 786 } 787 up_read(&crypto_alg_sem); 788 789 if (shoot) { 790 crypto_shoot_alg(target); 791 crypto_alg_put(target); 792 } 793 794 return alg; 795 } 796 797 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, 798 u32 mask) 799 { 800 struct crypto_alg *alg; 801 struct crypto_tfm *tfm; 802 803 alg = crypto_spawn_alg(spawn); 804 if (IS_ERR(alg)) 805 return ERR_CAST(alg); 806 807 tfm = ERR_PTR(-EINVAL); 808 if (unlikely((alg->cra_flags ^ type) & mask)) 809 goto out_put_alg; 810 811 tfm = __crypto_alloc_tfm(alg, type, mask); 812 if (IS_ERR(tfm)) 813 goto out_put_alg; 814 815 return tfm; 816 817 out_put_alg: 818 crypto_mod_put(alg); 819 return tfm; 820 } 821 EXPORT_SYMBOL_GPL(crypto_spawn_tfm); 822 823 void *crypto_spawn_tfm2(struct crypto_spawn *spawn) 824 { 825 struct crypto_alg *alg; 826 struct crypto_tfm *tfm; 827 828 alg = crypto_spawn_alg(spawn); 829 if (IS_ERR(alg)) 830 return ERR_CAST(alg); 831 832 tfm = crypto_create_tfm(alg, spawn->frontend); 833 if (IS_ERR(tfm)) 834 goto out_put_alg; 835 836 return tfm; 837 838 out_put_alg: 839 crypto_mod_put(alg); 840 return tfm; 841 } 842 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2); 843 844 int crypto_register_notifier(struct notifier_block *nb) 845 { 846 return blocking_notifier_chain_register(&crypto_chain, nb); 847 } 848 EXPORT_SYMBOL_GPL(crypto_register_notifier); 849 850 int crypto_unregister_notifier(struct notifier_block *nb) 851 { 852 return blocking_notifier_chain_unregister(&crypto_chain, nb); 853 } 854 EXPORT_SYMBOL_GPL(crypto_unregister_notifier); 855 856 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb) 857 { 858 struct rtattr *rta = tb[0]; 859 struct crypto_attr_type *algt; 860 861 if (!rta) 862 return ERR_PTR(-ENOENT); 863 if (RTA_PAYLOAD(rta) < sizeof(*algt)) 864 return ERR_PTR(-EINVAL); 865 if (rta->rta_type != CRYPTOA_TYPE) 866 return ERR_PTR(-EINVAL); 867 868 algt = RTA_DATA(rta); 869 870 return algt; 871 } 872 EXPORT_SYMBOL_GPL(crypto_get_attr_type); 873 874 /** 875 * crypto_check_attr_type() - check algorithm type and compute inherited mask 876 * @tb: the template parameters 877 * @type: the algorithm type the template would be instantiated as 878 * @mask_ret: (output) the mask that should be passed to crypto_grab_*() 879 * to restrict the flags of any inner algorithms 880 * 881 * Validate that the algorithm type the user requested is compatible with the 882 * one the template would actually be instantiated as. E.g., if the user is 883 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because 884 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm. 885 * 886 * Also compute the mask to use to restrict the flags of any inner algorithms. 887 * 888 * Return: 0 on success; -errno on failure 889 */ 890 int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret) 891 { 892 struct crypto_attr_type *algt; 893 894 algt = crypto_get_attr_type(tb); 895 if (IS_ERR(algt)) 896 return PTR_ERR(algt); 897 898 if ((algt->type ^ type) & algt->mask) 899 return -EINVAL; 900 901 *mask_ret = crypto_algt_inherited_mask(algt); 902 return 0; 903 } 904 EXPORT_SYMBOL_GPL(crypto_check_attr_type); 905 906 const char *crypto_attr_alg_name(struct rtattr *rta) 907 { 908 struct crypto_attr_alg *alga; 909 910 if (!rta) 911 return ERR_PTR(-ENOENT); 912 if (RTA_PAYLOAD(rta) < sizeof(*alga)) 913 return ERR_PTR(-EINVAL); 914 if (rta->rta_type != CRYPTOA_ALG) 915 return ERR_PTR(-EINVAL); 916 917 alga = RTA_DATA(rta); 918 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0; 919 920 return alga->name; 921 } 922 EXPORT_SYMBOL_GPL(crypto_attr_alg_name); 923 924 int __crypto_inst_setname(struct crypto_instance *inst, const char *name, 925 const char *driver, struct crypto_alg *alg) 926 { 927 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name, 928 alg->cra_name) >= CRYPTO_MAX_ALG_NAME) 929 return -ENAMETOOLONG; 930 931 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", 932 driver, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 933 return -ENAMETOOLONG; 934 935 return 0; 936 } 937 EXPORT_SYMBOL_GPL(__crypto_inst_setname); 938 939 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen) 940 { 941 INIT_LIST_HEAD(&queue->list); 942 queue->backlog = &queue->list; 943 queue->qlen = 0; 944 queue->max_qlen = max_qlen; 945 } 946 EXPORT_SYMBOL_GPL(crypto_init_queue); 947 948 int crypto_enqueue_request(struct crypto_queue *queue, 949 struct crypto_async_request *request) 950 { 951 int err = -EINPROGRESS; 952 953 if (unlikely(queue->qlen >= queue->max_qlen)) { 954 if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) { 955 err = -ENOSPC; 956 goto out; 957 } 958 err = -EBUSY; 959 if (queue->backlog == &queue->list) 960 queue->backlog = &request->list; 961 } 962 963 queue->qlen++; 964 list_add_tail(&request->list, &queue->list); 965 966 out: 967 return err; 968 } 969 EXPORT_SYMBOL_GPL(crypto_enqueue_request); 970 971 void crypto_enqueue_request_head(struct crypto_queue *queue, 972 struct crypto_async_request *request) 973 { 974 if (unlikely(queue->qlen >= queue->max_qlen)) 975 queue->backlog = queue->backlog->prev; 976 977 queue->qlen++; 978 list_add(&request->list, &queue->list); 979 } 980 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head); 981 982 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue) 983 { 984 struct list_head *request; 985 986 if (unlikely(!queue->qlen)) 987 return NULL; 988 989 queue->qlen--; 990 991 if (queue->backlog != &queue->list) 992 queue->backlog = queue->backlog->next; 993 994 request = queue->list.next; 995 list_del_init(request); 996 997 return list_entry(request, struct crypto_async_request, list); 998 } 999 EXPORT_SYMBOL_GPL(crypto_dequeue_request); 1000 1001 static inline void crypto_inc_byte(u8 *a, unsigned int size) 1002 { 1003 u8 *b = (a + size); 1004 u8 c; 1005 1006 for (; size; size--) { 1007 c = *--b + 1; 1008 *b = c; 1009 if (c) 1010 break; 1011 } 1012 } 1013 1014 void crypto_inc(u8 *a, unsigned int size) 1015 { 1016 __be32 *b = (__be32 *)(a + size); 1017 u32 c; 1018 1019 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || 1020 IS_ALIGNED((unsigned long)b, __alignof__(*b))) 1021 for (; size >= 4; size -= 4) { 1022 c = be32_to_cpu(*--b) + 1; 1023 *b = cpu_to_be32(c); 1024 if (likely(c)) 1025 return; 1026 } 1027 1028 crypto_inc_byte(a, size); 1029 } 1030 EXPORT_SYMBOL_GPL(crypto_inc); 1031 1032 unsigned int crypto_alg_extsize(struct crypto_alg *alg) 1033 { 1034 return alg->cra_ctxsize + 1035 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1)); 1036 } 1037 EXPORT_SYMBOL_GPL(crypto_alg_extsize); 1038 1039 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend, 1040 u32 type, u32 mask) 1041 { 1042 int ret = 0; 1043 struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask); 1044 1045 if (!IS_ERR(alg)) { 1046 crypto_mod_put(alg); 1047 ret = 1; 1048 } 1049 1050 return ret; 1051 } 1052 EXPORT_SYMBOL_GPL(crypto_type_has_alg); 1053 1054 static void __init crypto_start_tests(void) 1055 { 1056 if (!IS_BUILTIN(CONFIG_CRYPTO_ALGAPI)) 1057 return; 1058 1059 if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS)) 1060 return; 1061 1062 set_crypto_boot_test_finished(); 1063 1064 for (;;) { 1065 struct crypto_larval *larval = NULL; 1066 struct crypto_alg *q; 1067 1068 down_write(&crypto_alg_sem); 1069 1070 list_for_each_entry(q, &crypto_alg_list, cra_list) { 1071 struct crypto_larval *l; 1072 1073 if (!crypto_is_larval(q)) 1074 continue; 1075 1076 l = (void *)q; 1077 1078 if (!crypto_is_test_larval(l)) 1079 continue; 1080 1081 if (l->test_started) 1082 continue; 1083 1084 l->test_started = true; 1085 larval = l; 1086 break; 1087 } 1088 1089 up_write(&crypto_alg_sem); 1090 1091 if (!larval) 1092 break; 1093 1094 crypto_schedule_test(larval); 1095 } 1096 } 1097 1098 static int __init crypto_algapi_init(void) 1099 { 1100 crypto_init_proc(); 1101 crypto_start_tests(); 1102 return 0; 1103 } 1104 1105 static void __exit crypto_algapi_exit(void) 1106 { 1107 crypto_exit_proc(); 1108 } 1109 1110 /* 1111 * We run this at late_initcall so that all the built-in algorithms 1112 * have had a chance to register themselves first. 1113 */ 1114 late_initcall(crypto_algapi_init); 1115 module_exit(crypto_algapi_exit); 1116 1117 MODULE_LICENSE("GPL"); 1118 MODULE_DESCRIPTION("Cryptographic algorithms API"); 1119 MODULE_SOFTDEP("pre: cryptomgr"); 1120