/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/string.h>

#include "internal.h"

static void crypto_remove_final(struct list_head *list);

static LIST_HEAD(crypto_template_list);

void crypto_larval_error(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_alg_lookup(name, type, mask);

	if (alg) {
		if (crypto_is_larval(alg)) {
			struct crypto_larval *larval = (void *)alg;

			complete_all(&larval->completion);
		}
		crypto_mod_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_larval_error);

static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	if (alg->cra_blocksize > PAGE_SIZE / 8)
		return -EINVAL;

	if (alg->cra_priority < 0)
		return -EINVAL;

	return crypto_set_driver_name(alg);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	tmpl->free(inst);
	crypto_tmpl_put(tmpl);
}

static void crypto_remove_spawn(struct crypto_spawn *spawn,
				struct list_head *list,
				struct list_head *secondary_spawns)
{
	struct crypto_instance *inst = spawn->inst;
	struct crypto_template *tmpl = inst->tmpl;

	list_del_init(&spawn->list);
	spawn->alg = NULL;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg);
	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	list_splice(&inst->alg.cra_users, secondary_spawns);
}

static void crypto_remove_spawns(struct list_head *spawns,
				 struct list_head *list, u32 new_type)
{
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);

	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		crypto_remove_spawn(spawn, list, &secondary_spawns);
	}

	while (!list_empty(&secondary_spawns)) {
		list_for_each_entry_safe(spawn, n, &secondary_spawns, list)
			crypto_remove_spawn(spawn, list, &secondary_spawns);
	}
}
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	atomic_set(&alg->cra_refcnt, 1);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	atomic_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (!crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			complete_all(&larval->completion);
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

static void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_interruptible(&larval->completion);
	WARN_ON(err);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
	list_del_init(&alg->cra_list);
	crypto_remove_spawns(&alg->cra_users, list, alg->cra_flags);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
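/*
 * Illustrative sketch only (not part of algapi.c): one way a module could use
 * crypto_register_alg()/crypto_unregister_alg() above. Every example_* name,
 * the 16-byte key/block sizes and the no-op cipher callbacks are hypothetical
 * and exist purely to show the registration calls; the block is disabled with
 * "#if 0" so it is never built.
 */
#if 0
static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen)
{
	return 0;			/* a real cipher would expand the key here */
}

static void example_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	memcpy(dst, src, 16);		/* placeholder block transform */
}

static void example_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	memcpy(dst, src, 16);		/* placeholder block transform */
}

static struct crypto_alg example_alg = {
	.cra_name		= "example",
	.cra_driver_name	= "example-generic",
	.cra_priority		= 100,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= 16,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= 16,
		.cia_max_keysize	= 16,
		.cia_setkey		= example_setkey,
		.cia_encrypt		= example_encrypt,
		.cia_decrypt		= example_decrypt } }
};

static int __init example_mod_init(void)
{
	/* Waits via crypto_wait_for_test() until the larval is resolved. */
	return crypto_register_alg(&example_alg);
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_alg(&example_alg);
}
#endif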
int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *p, *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, p, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);
		BUG_ON(err);
	}

	crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl);

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, p, n, list, list) {
		BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
		tmpl->free(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name), name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		goto err;

	inst->alg.cra_module = tmpl->module;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	down_write(&crypto_alg_sem);
	list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *alg2;
	struct crypto_tfm *tfm;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	alg2 = alg;
	if (alg2)
		alg2 = crypto_mod_get(alg2);
	up_read(&crypto_alg_sem);

	if (!alg2) {
		if (alg)
			crypto_shoot_alg(alg);
		return ERR_PTR(-EAGAIN);
	}

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);
int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	const char *name;
	int err;

	name = crypto_attr_alg_name(rta);
	err = PTR_ERR(name);
	if (IS_ERR(name))
		return ERR_PTR(err);

	return crypto_alg_mod_lookup(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg)
{
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	spawn = crypto_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);

	if (err)
		goto err_free_inst;

	return inst;

err_free_inst:
	kfree(inst);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		err = -EBUSY;
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			goto out;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
{
	struct crypto_async_request *req;

	list_for_each_entry(req, &queue->list, list) {
		if (req->tfm == tfm)
			return 1;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
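/*
 * Illustrative sketch only (not part of algapi.c): how a driver could sit on
 * top of the crypto_queue helpers above. The exampledev_* names, the spinlock
 * and the "submit to hardware" step are hypothetical assumptions; the block is
 * disabled with "#if 0" so it is never built.
 */
#if 0
struct exampledev {
	spinlock_t		lock;
	struct crypto_queue	queue;
};

static void exampledev_setup(struct exampledev *dev)
{
	spin_lock_init(&dev->lock);
	crypto_init_queue(&dev->queue, 50);	/* allow up to 50 pending requests */
}

/* Called from the algorithm's request entry point. */
static int exampledev_queue_req(struct exampledev *dev,
				struct crypto_async_request *req)
{
	unsigned long flags;
	int err;

	spin_lock_irqsave(&dev->lock, flags);
	err = crypto_enqueue_request(&dev->queue, req);
	spin_unlock_irqrestore(&dev->lock, flags);

	/* -EINPROGRESS: queued; -EBUSY: backlogged or rejected (see above). */
	return err;
}

/* Called from a tasklet or completion interrupt to start the next request. */
static void exampledev_next_req(struct exampledev *dev)
{
	struct crypto_async_request *req;
	unsigned long flags;

	spin_lock_irqsave(&dev->lock, flags);
	req = crypto_dequeue_request(&dev->queue);
	spin_unlock_irqrestore(&dev->lock, flags);

	if (!req)
		return;

	/* ... program the hardware with req here, then report the result ... */
	req->complete(req, 0);
}
#endif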
static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	for (; size >= 4; size -= 4) {
		c = be32_to_cpu(*--b) + 1;
		*b = cpu_to_be32(c);
		if (c)
			return;
	}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
{
	for (; size; size--)
		*a++ ^= *b++;
}

void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	u32 *a = (u32 *)dst;
	u32 *b = (u32 *)src;

	for (; size >= 4; size -= 4)
		*a++ ^= *b++;

	crypto_xor_byte((u8 *)a, (u8 *)b, size);
}
EXPORT_SYMBOL_GPL(crypto_xor);

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
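/*
 * Illustrative sketch only (not part of algapi.c): how a counter-style mode
 * might use the crypto_inc() and crypto_xor() helpers above. The function
 * name example_ctr_step and the fixed 16-byte block size are hypothetical,
 * and the block is disabled with "#if 0" so it is never built.
 */
#if 0
static void example_ctr_step(struct crypto_cipher *cipher, u8 *counter,
			     u8 *dst, const u8 *src)
{
	u8 keystream[16];

	/* keystream = E_K(counter) */
	crypto_cipher_encrypt_one(cipher, keystream, counter);

	/* dst = src XOR keystream */
	memcpy(dst, src, sizeof(keystream));
	crypto_xor(dst, keystream, sizeof(keystream));

	/* advance the big-endian block counter for the next block */
	crypto_inc(counter, sizeof(keystream));
}
#endif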