/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
        atomic_inc(&alg->cra_refcnt);
        return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        crypto_alg_put(alg);
        module_put(alg->cra_module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        atomic_set(&larval->alg.cra_refcnt, 2);
        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg)
                kfree(larval);

        return alg;
}
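
/*
 * A larval is a temporary placeholder registered under the requested
 * algorithm name while the real ("adult") algorithm is being loaded or
 * constructed.  Its refcount starts at 2 in crypto_larval_alloc() above:
 * one reference is dropped by crypto_larval_kill() when the larval is
 * removed from crypto_alg_list, the other by crypto_larval_wait() once
 * the waiter is done with it.
 */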

static void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete(&larval->completion);
        crypto_alg_put(alg);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
        alg = larval->adult;
        if (alg) {
                if (!crypto_mod_get(alg))
                        alg = ERR_PTR(-EAGAIN);
        } else
                alg = ERR_PTR(-ENOENT);
        crypto_mod_put(&larval->alg);

        return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
                                      name);
        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
        }

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags = flags & CRYPTO_TFM_REQ_MASK;
        flags &= ~CRYPTO_TFM_REQ_MASK;

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_flags(tfm, flags);

        case CRYPTO_ALG_TYPE_DIGEST:
                return crypto_init_digest_flags(tfm, flags);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_flags(tfm, flags);
        }

        return 0;
}

static int crypto_init_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type)
                return type->init(tfm);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_DIGEST:
                return crypto_init_digest_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (type->exit)
                        type->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                crypto_exit_digest_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}
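
/*
 * Compute the size to reserve for a transform's context, including any
 * slack needed to honour the algorithm's alignment mask: the portion of
 * cra_alignmask not already guaranteed by crypto_tfm_ctx_alignment() is
 * added up front so the context pointer can be rounded up later.
 */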
static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
{
        const struct crypto_type *type = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type)
                return len + type->ctxsize(alg);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg, flags);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                len += crypto_digest_ctxsize(alg, flags);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg, flags);
                break;
        }

        return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out;

        tfm->__crt_alg = alg;

        err = crypto_init_flags(tfm, flags);
        if (err)
                goto out_free_tfm;

        err = crypto_init_ops(tfm);
        if (err)
                goto out_free_tfm;

        if (alg->cra_init && (err = alg->cra_init(tfm))) {
                if (err == -EAGAIN)
                        crypto_shoot_alg(alg);
                goto cra_init_failed;
        }

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        kfree(tfm);
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
{
        struct crypto_tfm *tfm = NULL;
        int err;

        do {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(name, 0, CRYPTO_ALG_ASYNC);
                err = PTR_ERR(alg);
                if (IS_ERR(alg))
                        continue;

                tfm = __crypto_alloc_tfm(alg, flags);
                err = 0;
                if (IS_ERR(tfm)) {
                        crypto_mod_put(alg);
                        err = PTR_ERR(tfm);
                        tfm = NULL;
                }
        } while (err == -EAGAIN && !signal_pending(current));

        return tfm;
}

/**
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm, which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher().
 *
 * In case of error the return value is an error pointer.
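 *
 * Example (illustrative sketch only; "cbc(aes)" is just an assumed
 * algorithm name, and error handling is reduced to the minimum):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);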
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                err = PTR_ERR(alg);
                tfm = ERR_PTR(err);
                if (IS_ERR(alg))
                        goto err;

                tfm = __crypto_alloc_tfm(alg, 0);
                if (!IS_ERR(tfm))
                        break;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        /* Propagate -EINTR rather than the stale -EAGAIN. */
                        err = -EINTR;
                        tfm = ERR_PTR(err);
                        break;
                }
        }

        return tfm;
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

/**
 * crypto_free_tfm - Free crypto transform
 * @tfm: Transform to free
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_free_tfm(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;
        int size;

        if (unlikely(!tfm))
                return;

        alg = tfm->__crt_alg;
        size = sizeof(*tfm) + alg->cra_ctxsize;

        if (alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        memset(tfm, 0, size);
        kfree(tfm);
}

int crypto_alg_available(const char *name, u32 flags)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, 0,
                                                       CRYPTO_ALG_ASYNC);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}

EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
EXPORT_SYMBOL_GPL(crypto_free_tfm);
EXPORT_SYMBOL_GPL(crypto_alg_available);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
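
/*
 * Example: probing for algorithm availability with crypto_has_alg()
 * (illustrative sketch; "hmac(sha1)" is just an assumed algorithm name):
 *
 *	if (!crypto_has_alg("hmac(sha1)", 0, 0))
 *		printk(KERN_INFO "hmac(sha1) not available\n");
 */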