// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
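
/*
 * A "larval" is a temporary placeholder entry on crypto_alg_list standing in
 * for an algorithm that is still being instantiated or tested.  Concurrent
 * lookups for the same name wait on the larval's completion until the
 * finished ("adult") algorithm takes its place, the larval is killed, or the
 * wait times out.
 */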

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
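
/*
 * Illustrative sketch (not part of the original file): how the type/mask
 * convention described in crypto_alg_mod_lookup() above translates into
 * lookups.  The calls below use crypto_alg_mod_lookup() directly purely for
 * demonstration; real users go through an allocation helper such as
 * crypto_alloc_shash().  Guarded by "#if 0" so it is never compiled.
 */
#if 0
static void crypto_internal_flag_examples(void)
{
	struct crypto_alg *alg;

	/*
	 * Default: type == 0, mask == 0.  Only non-internal algorithms
	 * match, because the lookup ORs CRYPTO_ALG_INTERNAL into the mask.
	 */
	alg = crypto_alg_mod_lookup("sha256", 0, 0);
	if (!IS_ERR(alg))
		crypto_mod_put(alg);

	/* Explicitly request an internal-only implementation. */
	alg = crypto_alg_mod_lookup("sha256", CRYPTO_ALG_INTERNAL,
				    CRYPTO_ALG_INTERNAL);
	if (!IS_ERR(alg))
		crypto_mod_put(alg);

	/*
	 * Accept either an internal or a non-internal implementation:
	 * set the flag in type but leave it out of mask.
	 */
	alg = crypto_alg_mod_lookup("sha256", CRYPTO_ALG_INTERNAL, 0);
	if (!IS_ERR(alg))
		crypto_mod_put(alg);
}
#endif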

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
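
/*
 * Illustrative sketch (not part of the original file): minimal caller-side
 * usage of crypto_alloc_base() following the ERR_PTR convention documented
 * above.  The algorithm name is only an example; crypto_free_tfm() is the
 * usual release helper from <linux/crypto.h>.  Guarded by "#if 0" so it is
 * never compiled.
 */
#if 0
static int crypto_alloc_base_example(void)
{
	struct crypto_tfm *tfm;

	/* Look up (and, if necessary, load) an implementation of "aes". */
	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... use the transform through a type-specific wrapper ... */

	/* Drop the transform and the algorithm refcount taken above. */
	crypto_free_tfm(tfm);
	return 0;
}
#endif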

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
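
/*
 * Illustrative sketch (not part of the original file): the usual way callers
 * pair crypto_req_done() with a crypto_wait to turn an asynchronous request
 * into a synchronous one.  DECLARE_CRYPTO_WAIT() and crypto_wait_req() come
 * from <linux/crypto.h>; the skcipher request here is only an example.
 * Guarded by "#if 0" so it is never compiled.
 */
#if 0
static int crypto_wait_example(struct skcipher_request *req)
{
	DECLARE_CRYPTO_WAIT(wait);

	/* Have completion of the request fire crypto_req_done(&wait). */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);

	/* Submit and block until crypto_req_done() records the result. */
	return crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
}
#endif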

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
MODULE_SOFTDEP("pre: cryptomgr");