// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#if IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) && \
    !IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;	/* below -1 so even larvals (priority -1) can win */

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		/*
		 * A driver-name match is exact; an algorithm-name match is
		 * fuzzy and only taken if it beats the best priority so far.
		 */
		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	/* One reference for the list, one for the caller. */
	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		/* Somebody else registered first; use their entry instead. */
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

static void crypto_larval_kill(struct crypto_larval *larval)
{
	bool unlinked;

	down_write(&crypto_alg_sem);
	unlinked = list_empty(&larval->alg.cra_list);
	if (!unlinked)
		list_del_init(&larval->alg.cra_list);
	up_write(&crypto_alg_sem);

	if (unlinked)
		return;

	complete_all(&larval->completion);
	crypto_alg_put(&larval->alg);
}

void crypto_schedule_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	WARN_ON_ONCE(err != NOTIFY_STOP);
}
EXPORT_SYMBOL_GPL(crypto_schedule_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	/* Re-check under the lock so the test is started exactly once. */
	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_schedule_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	long time_left;

again:
	larval = container_of(alg, struct crypto_larval, alg);

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	/* Give the algorithm at most a minute to materialise. */
	time_left = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (time_left < 0)
		alg = ERR_PTR(-EINTR);
	else if (!time_left) {
		if (crypto_is_test_larval(larval))
			crypto_larval_kill(larval);
		alg = ERR_PTR(-ETIMEDOUT);
	} else if (!alg) {
		u32 type;
		u32 mask;

		alg = &larval->alg;
		type = alg->cra_flags & ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
		mask = larval->mask;
		alg = crypto_alg_lookup(alg->cra_name, type, mask) ?:
		      ERR_PTR(-ENOENT);
	} else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	/* The adult may itself be a larval; keep waiting until it isn't. */
	if (!IS_ERR(alg) && crypto_is_larval(alg))
		goto again;

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	/* Unless the caller says otherwise, only match tested algorithms. */
	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		/* Reject FIPS-internal algorithms unless explicitly asked. */
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		/* A match without CRYPTO_ALG_TESTED means the test failed. */
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		/*
		 * Also pull in every implementation via the -all alias,
		 * unless the caller explicitly wants one that does not
		 * require a fallback.
		 */
		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(container_of(larval, struct crypto_larval, alg));
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

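/*
 * Illustrative sketch (not part of the original file): how a caller
 * expresses the CRYPTO_ALG_INTERNAL convention documented above.  The
 * algorithm name and the example function are assumptions for
 * demonstration only.
 */
#if 0
static void example_internal_lookup(void)
{
	struct crypto_alg *alg;

	/* Default: only non-internal implementations can match. */
	alg = crypto_alg_mod_lookup("sha256", 0, 0);

	/* Accept either internal or non-internal implementations. */
	alg = crypto_alg_mod_lookup("sha256", CRYPTO_ALG_INTERNAL, 0);

	/* Require an internal implementation. */
	alg = crypto_alg_mod_lookup("sha256", CRYPTO_ALG_INTERNAL,
				    CRYPTO_ALG_INTERNAL);
}
#endif
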
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	/* Reserve room to align the context beyond kmalloc's guarantee. */
	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
					 u32 mask, gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, gfp);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;
	refcount_set(&tfm->refcnt, 1);

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

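/*
 * Minimal usage sketch (illustrative, not part of the original file): a
 * legacy caller allocating a transform by name.  "aes" is an assumed
 * example name; crypto_free_tfm() is the release helper from
 * <linux/crypto.h>, which wraps crypto_destroy_tfm().
 */
#if 0
static int example_use_alloc_base(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... use the transform via a type-specific wrapper ... */

	crypto_free_tfm(tfm);
	return 0;
}
#endif
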
static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
				 const struct crypto_type *frontend, int node,
				 gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfmsize;
	unsigned int total;
	char *mem;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, gfp, node);
	if (mem == NULL)
		return ERR_PTR(-ENOMEM);

	/* The generic tfm sits behind the frontend-specific part. */
	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;
	refcount_set(&tfm->refcnt, 1);

	return mem;
}

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	struct crypto_tfm *tfm;
	char *mem;
	int err;

	mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
	if (IS_ERR(mem))
		goto out;

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

void *crypto_clone_tfm(const struct crypto_type *frontend,
		       struct crypto_tfm *otfm)
{
	struct crypto_alg *alg = otfm->__crt_alg;
	struct crypto_tfm *tfm;
	char *mem;

	/* If the algorithm's module is going away, the clone is stale. */
	mem = ERR_PTR(-ESTALE);
	if (unlikely(!crypto_mod_get(alg)))
		goto out;

	mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
	if (IS_ERR(mem)) {
		crypto_mod_put(alg);
		goto out;
	}

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
	tfm->crt_flags = otfm->crt_flags;
	tfm->exit = otfm->exit;

out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_clone_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node on which to allocate the transform and its requests;
 *		NUMA_NO_NODE means the caller has no special requirement.
 *
 *	crypto_alloc_tfm_node() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

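/*
 * Illustrative sketch (not part of the original file): type-specific
 * front ends are thin wrappers around this function.  This is roughly how
 * crypto_alloc_skcipher() in crypto/skcipher.c is built, where
 * crypto_alloc_tfm() is the NUMA_NO_NODE shorthand from internal.h and
 * crypto_skcipher_type is the skcipher frontend.
 */
#if 0
struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
}
#endif
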
/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	if (!refcount_dec_and_test(&tfm->refcnt))
		return;
	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

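/*
 * Illustrative sketch (not part of the original file): type-specific free
 * helpers pass both the start of the allocation and the embedded tfm.
 * crypto_free_tfm() from <linux/crypto.h> is the simplest case, where the
 * two pointers coincide.
 */
#if 0
static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}
#endif
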
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(void *data, int err)
{
	struct crypto_wait *wait = data;

	/* -EINPROGRESS means the request was backlogged, not finished. */
	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

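/*
 * Illustrative sketch (not part of the original file): the synchronous
 * wait pattern crypto_req_done() supports, using DECLARE_CRYPTO_WAIT()
 * and crypto_wait_req() from <linux/crypto.h> with an assumed skcipher
 * request that the caller has already set up.
 */
#if 0
static int example_sync_encrypt(struct skcipher_request *req)
{
	DECLARE_CRYPTO_WAIT(wait);

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					   CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	return crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
}
#endif
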
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");