/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/string.h>

#include "internal.h"

static void crypto_remove_final(struct list_head *list);

static LIST_HEAD(crypto_template_list);

void crypto_larval_error(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_alg_lookup(name, type, mask);

	if (alg) {
		if (crypto_is_larval(alg)) {
			struct crypto_larval *larval = (void *)alg;
			complete_all(&larval->completion);
		}
		crypto_mod_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_larval_error);

static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	if (alg->cra_blocksize > PAGE_SIZE / 8)
		return -EINVAL;

	if (alg->cra_priority < 0)
		return -EINVAL;

	return crypto_set_driver_name(alg);
}
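
/*
 * Illustrative sketch (not part of this file): a minimal crypto_alg
 * definition as a hypothetical driver might pass to crypto_register_alg().
 * The names and values are assumptions chosen to satisfy the checks in
 * crypto_check_alg() above:
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name	 = "example",
 *		.cra_driver_name = "example-generic",
 *		.cra_priority	 = 100,
 *		.cra_flags	 = CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	 = 16,
 *		.cra_ctxsize	 = sizeof(struct example_ctx),
 *		.cra_alignmask	 = 3,
 *		.cra_module	 = THIS_MODULE,
 *	};
 *
 * cra_alignmask must be one less than a power of two (0, 3, 7, ...),
 * cra_blocksize may not exceed PAGE_SIZE / 8, cra_priority may not be
 * negative, and an empty cra_driver_name is filled in by
 * crypto_set_driver_name() as "<cra_name>-generic".
 */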

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	tmpl->free(inst);
	crypto_tmpl_put(tmpl);
}

static void crypto_remove_spawn(struct crypto_spawn *spawn,
				struct list_head *list,
				struct list_head *secondary_spawns)
{
	struct crypto_instance *inst = spawn->inst;
	struct crypto_template *tmpl = inst->tmpl;

	list_del_init(&spawn->list);
	spawn->alg = NULL;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg);
	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	list_splice(&inst->alg.cra_users, secondary_spawns);
}

static void crypto_remove_spawns(struct list_head *spawns,
				 struct list_head *list, u32 new_type)
{
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);

	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		crypto_remove_spawn(spawn, list, &secondary_spawns);
	}

	while (!list_empty(&secondary_spawns)) {
		list_for_each_entry_safe(spawn, n, &secondary_spawns, list)
			crypto_remove_spawn(spawn, list, &secondary_spawns);
	}
}

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	atomic_set(&alg->cra_refcnt, 1);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	atomic_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}
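
/*
 * Registration/testing handshake, summarised.  This is a sketch of the
 * flow implemented by __crypto_register_alg() above and crypto_alg_tested()
 * and crypto_wait_for_test() below; the tester is normally the cryptomgr
 * module, but any listener on the CRYPTO_MSG_ALG_REGISTER notification may
 * fill that role:
 *
 *	crypto_register_alg(alg)
 *	  __crypto_register_alg(alg)	adds alg plus a test larval
 *	  crypto_wait_for_test(larval)	sends CRYPTO_MSG_ALG_REGISTER and
 *					sleeps on larval->completion
 *	  <tester>			runs the self-tests and calls
 *	  crypto_alg_tested(name, err)	which marks alg CRYPTO_ALG_TESTED
 *					(or not) and completes the larval
 */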

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			complete_all(&larval->completion);
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

static void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_interruptible(&larval->completion);
	WARN_ON(err);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
	list_del_init(&alg->cra_list);
	crypto_remove_spawns(&alg->cra_users, list, alg->cra_flags);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
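
/*
 * Illustrative sketch (assumed names, not part of this file): a template
 * module registers itself with crypto_register_template() below roughly
 * like this.  ->alloc() builds instances on demand and ->free() releases
 * them:
 *
 *	static struct crypto_template example_tmpl = {
 *		.name	= "example",
 *		.alloc	= example_alloc,
 *		.free	= example_free,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	static int __init example_module_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 *
 *	static void __exit example_module_exit(void)
 *	{
 *		crypto_unregister_template(&example_tmpl);
 *	}
 */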

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *p, *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, p, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);
		BUG_ON(err);
	}

	crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl);

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, p, n, list, list) {
		BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
		tmpl->free(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name), name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		goto err;

	inst->alg.cra_module = tmpl->module;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	down_write(&crypto_alg_sem);
	list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *alg2;
	struct crypto_tfm *tfm;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	alg2 = alg;
	if (alg2)
		alg2 = crypto_mod_get(alg2);
	up_read(&crypto_alg_sem);

	if (!alg2) {
		if (alg)
			crypto_shoot_alg(alg);
		return ERR_PTR(-EAGAIN);
	}

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
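
/*
 * Illustrative sketch (assumed names): how a template's ->alloc() method
 * typically combines the attribute helpers below with
 * crypto_alloc_instance().  Per-instance setup and most error handling
 * are abbreviated:
 *
 *	static struct crypto_instance *example_alloc(struct rtattr **tb)
 *	{
 *		struct crypto_instance *inst;
 *		struct crypto_alg *alg;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
 *		if (err)
 *			return ERR_PTR(err);
 *
 *		alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
 *				      CRYPTO_ALG_TYPE_MASK);
 *		if (IS_ERR(alg))
 *			return ERR_CAST(alg);
 *
 *		inst = crypto_alloc_instance("example", alg);
 *		crypto_mod_put(alg);
 *		return inst;
 *	}
 */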

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	const char *name;
	int err;

	name = crypto_attr_alg_name(rta);
	err = PTR_ERR(name);
	if (IS_ERR(name))
		return ERR_PTR(err);

	return crypto_alg_mod_lookup(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg)
{
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	spawn = crypto_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);

	if (err)
		goto err_free_inst;

	return inst;

err_free_inst:
	kfree(inst);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);
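
/*
 * Illustrative sketch (assumed names): how an async driver typically uses
 * the request queue helpers around this point in the file.  The caller is
 * expected to provide its own locking around enqueue/dequeue:
 *
 *	static struct crypto_queue example_queue;
 *
 *	crypto_init_queue(&example_queue, 100);
 *
 *	err = crypto_enqueue_request(&example_queue, req);
 *
 * err is -EINPROGRESS when the request was queued normally and -EBUSY when
 * the queue was already full; a full-queue request is still accepted as
 * backlog only if it carries CRYPTO_TFM_REQ_MAY_BACKLOG, otherwise it is
 * rejected.  A worker later pulls requests off in FIFO order:
 *
 *	req = crypto_dequeue_request(&example_queue);
 *	if (req)
 *		(process req, then complete it back to its caller)
 */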

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		err = -EBUSY;
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			goto out;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
{
	struct crypto_async_request *req;

	list_for_each_entry(req, &queue->list, list) {
		if (req->tfm == tfm)
			return 1;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	for (; size >= 4; size -= 4) {
		c = be32_to_cpu(*--b) + 1;
		*b = cpu_to_be32(c);
		if (c)
			return;
	}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
{
	for (; size; size--)
		*a++ ^= *b++;
}

void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	u32 *a = (u32 *)dst;
	u32 *b = (u32 *)src;

	for (; size >= 4; size -= 4)
		*a++ ^= *b++;

	crypto_xor_byte((u8 *)a, (u8 *)b, size);
}
EXPORT_SYMBOL_GPL(crypto_xor);

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");