xref: /linux/crypto/algapi.c (revision 7f71507851fc7764b36a3221839607d3a45c2025)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Cryptographic API for algorithms (i.e., low-level API).
4  *
5  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6  */
7 
8 #include <crypto/algapi.h>
9 #include <linux/err.h>
10 #include <linux/errno.h>
11 #include <linux/fips.h>
12 #include <linux/init.h>
13 #include <linux/kernel.h>
14 #include <linux/list.h>
15 #include <linux/module.h>
16 #include <linux/rtnetlink.h>
17 #include <linux/slab.h>
18 #include <linux/string.h>
19 #include <linux/workqueue.h>
20 
21 #include "internal.h"
22 
23 static LIST_HEAD(crypto_template_list);
24 
25 static inline void crypto_check_module_sig(struct module *mod)
26 {
27 	if (fips_enabled && mod && !module_sig_ok(mod))
28 		panic("Module %s signature verification failed in FIPS mode\n",
29 		      module_name(mod));
30 }
31 
32 static int crypto_check_alg(struct crypto_alg *alg)
33 {
34 	crypto_check_module_sig(alg->cra_module);
35 
36 	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
37 		return -EINVAL;
38 
39 	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
40 		return -EINVAL;
41 
42 	/* General maximums for all algs. */
43 	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
44 		return -EINVAL;
45 
46 	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
47 		return -EINVAL;
48 
49 	/* Lower maximums for specific alg types. */
50 	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
51 			       CRYPTO_ALG_TYPE_CIPHER) {
52 		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
53 			return -EINVAL;
54 
55 		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
56 			return -EINVAL;
57 	}
58 
59 	if (alg->cra_priority < 0)
60 		return -EINVAL;
61 
62 	refcount_set(&alg->cra_refcnt, 1);
63 
64 	return 0;
65 }
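
An aside on the alignmask test above: "x & (x + 1)" is zero exactly when x has the
form 2^n - 1 (0, 1, 3, 7, 15, ...), so the check rejects any cra_alignmask that is
not one less than a power of two.  A minimal illustration (hypothetical helper,
not part of this file):

	/* Mirrors the validity check used in crypto_check_alg() above. */
	static inline bool alignmask_is_valid(unsigned int alignmask)
	{
		/* 7 (0b111) passes; 5 (0b101) fails since 5 & 6 == 4. */
		return (alignmask & (alignmask + 1)) == 0;
	}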
66 
67 static void crypto_free_instance(struct crypto_instance *inst)
68 {
69 	inst->alg.cra_type->free(inst);
70 }
71 
72 static void crypto_destroy_instance_workfn(struct work_struct *w)
73 {
74 	struct crypto_instance *inst = container_of(w, struct crypto_instance,
75 						    free_work);
76 	struct crypto_template *tmpl = inst->tmpl;
77 
78 	crypto_free_instance(inst);
79 	crypto_tmpl_put(tmpl);
80 }
81 
82 static void crypto_destroy_instance(struct crypto_alg *alg)
83 {
84 	struct crypto_instance *inst = container_of(alg,
85 						    struct crypto_instance,
86 						    alg);
87 
88 	INIT_WORK(&inst->free_work, crypto_destroy_instance_workfn);
89 	schedule_work(&inst->free_work);
90 }
91 
92 /*
93  * This function adds a spawn to the list secondary_spawns which
94  * will be used at the end of crypto_remove_spawns to unregister
95  * instances, unless the spawn happens to be one that is depended
96  * on by the new algorithm (nalg in crypto_remove_spawns).
97  *
98  * This function is also responsible for resurrecting any algorithms
99  * in the dependency chain of nalg by unsetting n->dead.
100  */
101 static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
102 					    struct list_head *stack,
103 					    struct list_head *top,
104 					    struct list_head *secondary_spawns)
105 {
106 	struct crypto_spawn *spawn, *n;
107 
108 	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
109 	if (!spawn)
110 		return NULL;
111 
112 	n = list_prev_entry(spawn, list);
113 	list_move(&spawn->list, secondary_spawns);
114 
115 	if (list_is_last(&n->list, stack))
116 		return top;
117 
118 	n = list_next_entry(n, list);
119 	if (!spawn->dead)
120 		n->dead = false;
121 
122 	return &n->inst->alg.cra_users;
123 }
124 
125 static void crypto_remove_instance(struct crypto_instance *inst,
126 				   struct list_head *list)
127 {
128 	struct crypto_template *tmpl = inst->tmpl;
129 
130 	if (crypto_is_dead(&inst->alg))
131 		return;
132 
133 	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
134 
135 	if (!tmpl || !crypto_tmpl_get(tmpl))
136 		return;
137 
138 	list_move(&inst->alg.cra_list, list);
139 	hlist_del(&inst->list);
140 	inst->alg.cra_destroy = crypto_destroy_instance;
141 
142 	BUG_ON(!list_empty(&inst->alg.cra_users));
143 }
144 
145 /*
146  * Given an algorithm alg, remove all algorithms that depend on it
147  * through spawns.  If nalg is not null, then exempt any algorithm
148  * that nalg depends on.  This is useful when nalg itself
149  * depends on alg.
150  */
151 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
152 			  struct crypto_alg *nalg)
153 {
154 	u32 new_type = (nalg ?: alg)->cra_flags;
155 	struct crypto_spawn *spawn, *n;
156 	LIST_HEAD(secondary_spawns);
157 	struct list_head *spawns;
158 	LIST_HEAD(stack);
159 	LIST_HEAD(top);
160 
161 	spawns = &alg->cra_users;
162 	list_for_each_entry_safe(spawn, n, spawns, list) {
163 		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
164 			continue;
165 
166 		list_move(&spawn->list, &top);
167 	}
168 
169 	/*
170 	 * Perform a depth-first walk starting from alg through
171 	 * the cra_users tree.  The list stack records the path
172 	 * from alg to the current spawn.
173 	 */
174 	spawns = &top;
175 	do {
176 		while (!list_empty(spawns)) {
177 			struct crypto_instance *inst;
178 
179 			spawn = list_first_entry(spawns, struct crypto_spawn,
180 						 list);
181 			inst = spawn->inst;
182 
183 			list_move(&spawn->list, &stack);
184 			spawn->dead = !spawn->registered || &inst->alg != nalg;
185 
186 			if (!spawn->registered)
187 				break;
188 
189 			BUG_ON(&inst->alg == alg);
190 
191 			if (&inst->alg == nalg)
192 				break;
193 
194 			spawns = &inst->alg.cra_users;
195 
196 			/*
197 			 * Even if spawn->registered is true, the
198 			 * instance that owns this users list may
199 			 * itself still be unregistered, because its
200 			 * registration may have failed.  Hence the
201 			 * NULL test below is still needed.
202 			 *
203 			 * We may encounter an unregistered instance here, since
204 			 * an instance's spawns are set up prior to the instance
205 			 * being registered.  An unregistered instance will have
206 			 * NULL ->cra_users.next, since ->cra_users isn't
207 			 * properly initialized until registration.  But an
208 			 * unregistered instance cannot have any users, so treat
209 			 * it the same as ->cra_users being empty.
210 			 */
211 			if (spawns->next == NULL)
212 				break;
213 		}
214 	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
215 					      &secondary_spawns)));
216 
217 	/*
218 	 * Remove all instances that are marked as dead.  Also
219 	 * complete the resurrection of the others by moving them
220 	 * back to the cra_users list.
221 	 */
222 	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
223 		if (!spawn->dead)
224 			list_move(&spawn->list, &spawn->alg->cra_users);
225 		else if (spawn->registered)
226 			crypto_remove_instance(spawn->inst, list);
227 	}
228 }
229 EXPORT_SYMBOL_GPL(crypto_remove_spawns);
230 
231 static void crypto_alg_finish_registration(struct crypto_alg *alg,
232 					   struct list_head *algs_to_put)
233 {
234 	struct crypto_alg *q;
235 
236 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
237 		if (q == alg)
238 			continue;
239 
240 		if (crypto_is_moribund(q))
241 			continue;
242 
243 		if (crypto_is_larval(q))
244 			continue;
245 
246 		if (strcmp(alg->cra_name, q->cra_name))
247 			continue;
248 
249 		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
250 		    q->cra_priority > alg->cra_priority)
251 			continue;
252 
253 		crypto_remove_spawns(q, algs_to_put, alg);
254 	}
255 
256 	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
257 }
258 
259 static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
260 {
261 	struct crypto_larval *larval;
262 
263 	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
264 	    IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
265 	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
266 		return NULL; /* No self-test needed */
267 
268 	larval = crypto_larval_alloc(alg->cra_name,
269 				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
270 	if (IS_ERR(larval))
271 		return larval;
272 
273 	larval->adult = crypto_mod_get(alg);
274 	if (!larval->adult) {
275 		kfree(larval);
276 		return ERR_PTR(-ENOENT);
277 	}
278 
279 	refcount_set(&larval->alg.cra_refcnt, 1);
280 	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
281 	       CRYPTO_MAX_ALG_NAME);
282 	larval->alg.cra_priority = alg->cra_priority;
283 
284 	return larval;
285 }
286 
287 static struct crypto_larval *
288 __crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
289 {
290 	struct crypto_alg *q;
291 	struct crypto_larval *larval;
292 	int ret = -EAGAIN;
293 
294 	if (crypto_is_dead(alg))
295 		goto err;
296 
297 	INIT_LIST_HEAD(&alg->cra_users);
298 
299 	ret = -EEXIST;
300 
301 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
302 		if (q == alg)
303 			goto err;
304 
305 		if (crypto_is_moribund(q))
306 			continue;
307 
308 		if (crypto_is_larval(q)) {
309 			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
310 				goto err;
311 			continue;
312 		}
313 
314 		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
315 		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
316 		    !strcmp(q->cra_name, alg->cra_driver_name))
317 			goto err;
318 	}
319 
320 	larval = crypto_alloc_test_larval(alg);
321 	if (IS_ERR(larval))
322 		goto out;
323 
324 	list_add(&alg->cra_list, &crypto_alg_list);
325 
326 	if (larval) {
327 		/* No cheating! */
328 		alg->cra_flags &= ~CRYPTO_ALG_TESTED;
329 
330 		list_add(&larval->alg.cra_list, &crypto_alg_list);
331 	} else {
332 		alg->cra_flags |= CRYPTO_ALG_TESTED;
333 		crypto_alg_finish_registration(alg, algs_to_put);
334 	}
335 
336 out:
337 	return larval;
338 
339 err:
340 	larval = ERR_PTR(ret);
341 	goto out;
342 }
343 
344 void crypto_alg_tested(const char *name, int err)
345 {
346 	struct crypto_larval *test;
347 	struct crypto_alg *alg;
348 	struct crypto_alg *q;
349 	LIST_HEAD(list);
350 
351 	down_write(&crypto_alg_sem);
352 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
353 		if (crypto_is_moribund(q) || !crypto_is_larval(q))
354 			continue;
355 
356 		test = (struct crypto_larval *)q;
357 
358 		if (!strcmp(q->cra_driver_name, name))
359 			goto found;
360 	}
361 
362 	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
363 	up_write(&crypto_alg_sem);
364 	return;
365 
366 found:
367 	q->cra_flags |= CRYPTO_ALG_DEAD;
368 	alg = test->adult;
369 
370 	if (crypto_is_dead(alg))
371 		goto complete;
372 
373 	if (err == -ECANCELED)
374 		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
375 	else if (err)
376 		goto complete;
377 	else
378 		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;
379 
380 	alg->cra_flags |= CRYPTO_ALG_TESTED;
381 
382 	crypto_alg_finish_registration(alg, &list);
383 
384 complete:
385 	list_del_init(&test->alg.cra_list);
386 	complete_all(&test->completion);
387 
388 	up_write(&crypto_alg_sem);
389 
390 	crypto_alg_put(&test->alg);
391 	crypto_remove_final(&list);
392 }
393 EXPORT_SYMBOL_GPL(crypto_alg_tested);
394 
395 void crypto_remove_final(struct list_head *list)
396 {
397 	struct crypto_alg *alg;
398 	struct crypto_alg *n;
399 
400 	list_for_each_entry_safe(alg, n, list, cra_list) {
401 		list_del_init(&alg->cra_list);
402 		crypto_alg_put(alg);
403 	}
404 }
405 EXPORT_SYMBOL_GPL(crypto_remove_final);
406 
407 int crypto_register_alg(struct crypto_alg *alg)
408 {
409 	struct crypto_larval *larval;
410 	LIST_HEAD(algs_to_put);
411 	int err;
412 
413 	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
414 	err = crypto_check_alg(alg);
415 	if (err)
416 		return err;
417 
418 	down_write(&crypto_alg_sem);
419 	larval = __crypto_register_alg(alg, &algs_to_put);
420 	if (!IS_ERR_OR_NULL(larval)) {
421 		bool test_started = crypto_boot_test_finished();
422 
423 		larval->test_started = test_started;
424 		if (test_started)
425 			crypto_schedule_test(larval);
426 	}
427 	up_write(&crypto_alg_sem);
428 
429 	if (IS_ERR(larval))
430 		return PTR_ERR(larval);
431 	crypto_remove_final(&algs_to_put);
432 	return 0;
433 }
434 EXPORT_SYMBOL_GPL(crypto_register_alg);
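
For reference, a minimal sketch of how a driver might define and register a
single-block cipher through this interface.  All names (mydrv_*, "mycipher") and
sizes are illustrative assumptions, and the block transforms are placeholders
rather than a real algorithm:

	#include <crypto/algapi.h>
	#include <linux/module.h>
	#include <linux/string.h>

	static int mydrv_setkey(struct crypto_tfm *tfm, const u8 *key,
				unsigned int keylen)
	{
		return 0;	/* a real driver would validate and store the key */
	}

	static void mydrv_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
	{
		memcpy(dst, src, 16);	/* placeholder block transform */
	}

	static void mydrv_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
	{
		memcpy(dst, src, 16);	/* placeholder block transform */
	}

	static struct crypto_alg mydrv_alg = {
		.cra_name		= "mycipher",
		.cra_driver_name	= "mycipher-generic",
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize		= 16,
		.cra_module		= THIS_MODULE,
		.cra_u			= {
			.cipher = {
				.cia_min_keysize	= 16,
				.cia_max_keysize	= 32,
				.cia_setkey		= mydrv_setkey,
				.cia_encrypt		= mydrv_encrypt,
				.cia_decrypt		= mydrv_decrypt,
			},
		},
	};

	static int __init mydrv_init(void)
	{
		return crypto_register_alg(&mydrv_alg);	/* runs crypto_check_alg() above */
	}

	static void __exit mydrv_exit(void)
	{
		crypto_unregister_alg(&mydrv_alg);
	}
	module_init(mydrv_init);
	module_exit(mydrv_exit);
	MODULE_LICENSE("GPL");

Once registered, the algorithm shows up in /proc/crypto and can be looked up by
either its cra_name ("mycipher") or its cra_driver_name.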
435 
436 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
437 {
438 	if (unlikely(list_empty(&alg->cra_list)))
439 		return -ENOENT;
440 
441 	alg->cra_flags |= CRYPTO_ALG_DEAD;
442 
443 	list_del_init(&alg->cra_list);
444 	crypto_remove_spawns(alg, list, NULL);
445 
446 	return 0;
447 }
448 
449 void crypto_unregister_alg(struct crypto_alg *alg)
450 {
451 	int ret;
452 	LIST_HEAD(list);
453 
454 	down_write(&crypto_alg_sem);
455 	ret = crypto_remove_alg(alg, &list);
456 	up_write(&crypto_alg_sem);
457 
458 	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
459 		return;
460 
461 	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
462 		return;
463 
464 	if (alg->cra_destroy)
465 		alg->cra_destroy(alg);
466 
467 	crypto_remove_final(&list);
468 }
469 EXPORT_SYMBOL_GPL(crypto_unregister_alg);
470 
471 int crypto_register_algs(struct crypto_alg *algs, int count)
472 {
473 	int i, ret;
474 
475 	for (i = 0; i < count; i++) {
476 		ret = crypto_register_alg(&algs[i]);
477 		if (ret)
478 			goto err;
479 	}
480 
481 	return 0;
482 
483 err:
484 	for (--i; i >= 0; --i)
485 		crypto_unregister_alg(&algs[i]);
486 
487 	return ret;
488 }
489 EXPORT_SYMBOL_GPL(crypto_register_algs);
490 
491 void crypto_unregister_algs(struct crypto_alg *algs, int count)
492 {
493 	int i;
494 
495 	for (i = 0; i < count; i++)
496 		crypto_unregister_alg(&algs[i]);
497 }
498 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
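
Drivers that provide several algorithms usually register them in one call; a
brief sketch, assuming an array mydrv_algs[] filled in as in the earlier example:

	static int __init mydrv_mod_init(void)
	{
		/*
		 * On failure, crypto_register_algs() has already unregistered
		 * the entries it managed to register, so no extra rollback is
		 * needed here.
		 */
		return crypto_register_algs(mydrv_algs, ARRAY_SIZE(mydrv_algs));
	}

	static void __exit mydrv_mod_exit(void)
	{
		crypto_unregister_algs(mydrv_algs, ARRAY_SIZE(mydrv_algs));
	}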
499 
500 int crypto_register_template(struct crypto_template *tmpl)
501 {
502 	struct crypto_template *q;
503 	int err = -EEXIST;
504 
505 	down_write(&crypto_alg_sem);
506 
507 	crypto_check_module_sig(tmpl->module);
508 
509 	list_for_each_entry(q, &crypto_template_list, list) {
510 		if (q == tmpl)
511 			goto out;
512 	}
513 
514 	list_add(&tmpl->list, &crypto_template_list);
515 	err = 0;
516 out:
517 	up_write(&crypto_alg_sem);
518 	return err;
519 }
520 EXPORT_SYMBOL_GPL(crypto_register_template);
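
A template is registered much the same way; a minimal sketch, assuming a module
that already includes <crypto/algapi.h> (the "example" name and the empty
->create() body are placeholders; a fuller ->create() skeleton is sketched after
crypto_inst_setname() further below):

	static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
	{
		/* Parse tb[], grab inner algorithms, build and register an
		 * instance with crypto_register_instance(). */
		return -EINVAL;
	}

	static struct crypto_template example_tmpl = {
		.name	= "example",
		.create	= example_create,
		.module	= THIS_MODULE,
	};

	static int __init example_mod_init(void)
	{
		/* After this, "example(aes)" and friends can be instantiated. */
		return crypto_register_template(&example_tmpl);
	}

	static void __exit example_mod_exit(void)
	{
		crypto_unregister_template(&example_tmpl);
	}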
521 
522 int crypto_register_templates(struct crypto_template *tmpls, int count)
523 {
524 	int i, err;
525 
526 	for (i = 0; i < count; i++) {
527 		err = crypto_register_template(&tmpls[i]);
528 		if (err)
529 			goto out;
530 	}
531 	return 0;
532 
533 out:
534 	for (--i; i >= 0; --i)
535 		crypto_unregister_template(&tmpls[i]);
536 	return err;
537 }
538 EXPORT_SYMBOL_GPL(crypto_register_templates);
539 
540 void crypto_unregister_template(struct crypto_template *tmpl)
541 {
542 	struct crypto_instance *inst;
543 	struct hlist_node *n;
544 	struct hlist_head *list;
545 	LIST_HEAD(users);
546 
547 	down_write(&crypto_alg_sem);
548 
549 	BUG_ON(list_empty(&tmpl->list));
550 	list_del_init(&tmpl->list);
551 
552 	list = &tmpl->instances;
553 	hlist_for_each_entry(inst, list, list) {
554 		int err = crypto_remove_alg(&inst->alg, &users);
555 
556 		BUG_ON(err);
557 	}
558 
559 	up_write(&crypto_alg_sem);
560 
561 	hlist_for_each_entry_safe(inst, n, list, list) {
562 		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
563 		crypto_free_instance(inst);
564 	}
565 	crypto_remove_final(&users);
566 }
567 EXPORT_SYMBOL_GPL(crypto_unregister_template);
568 
569 void crypto_unregister_templates(struct crypto_template *tmpls, int count)
570 {
571 	int i;
572 
573 	for (i = count - 1; i >= 0; --i)
574 		crypto_unregister_template(&tmpls[i]);
575 }
576 EXPORT_SYMBOL_GPL(crypto_unregister_templates);
577 
578 static struct crypto_template *__crypto_lookup_template(const char *name)
579 {
580 	struct crypto_template *q, *tmpl = NULL;
581 
582 	down_read(&crypto_alg_sem);
583 	list_for_each_entry(q, &crypto_template_list, list) {
584 		if (strcmp(q->name, name))
585 			continue;
586 		if (unlikely(!crypto_tmpl_get(q)))
587 			continue;
588 
589 		tmpl = q;
590 		break;
591 	}
592 	up_read(&crypto_alg_sem);
593 
594 	return tmpl;
595 }
596 
597 struct crypto_template *crypto_lookup_template(const char *name)
598 {
599 	return try_then_request_module(__crypto_lookup_template(name),
600 				       "crypto-%s", name);
601 }
602 EXPORT_SYMBOL_GPL(crypto_lookup_template);
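
If a template lives in a loadable module, the request_module("crypto-%s", ...)
fallback above can autoload it on first use, provided the module declares the
matching alias.  Continuing the hypothetical "example" template from the earlier
sketch:

	MODULE_ALIAS_CRYPTO("example");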
603 
604 int crypto_register_instance(struct crypto_template *tmpl,
605 			     struct crypto_instance *inst)
606 {
607 	struct crypto_larval *larval;
608 	struct crypto_spawn *spawn;
609 	u32 fips_internal = 0;
610 	LIST_HEAD(algs_to_put);
611 	int err;
612 
613 	err = crypto_check_alg(&inst->alg);
614 	if (err)
615 		return err;
616 
617 	inst->alg.cra_module = tmpl->module;
618 	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
619 
620 	down_write(&crypto_alg_sem);
621 
622 	larval = ERR_PTR(-EAGAIN);
623 	for (spawn = inst->spawns; spawn;) {
624 		struct crypto_spawn *next;
625 
626 		if (spawn->dead)
627 			goto unlock;
628 
629 		next = spawn->next;
630 		spawn->inst = inst;
631 		spawn->registered = true;
632 
633 		fips_internal |= spawn->alg->cra_flags;
634 
635 		crypto_mod_put(spawn->alg);
636 
637 		spawn = next;
638 	}
639 
640 	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);
641 
642 	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
643 	if (IS_ERR(larval))
644 		goto unlock;
645 	else if (larval) {
646 		larval->test_started = true;
647 		crypto_schedule_test(larval);
648 	}
649 
650 	hlist_add_head(&inst->list, &tmpl->instances);
651 	inst->tmpl = tmpl;
652 
653 unlock:
654 	up_write(&crypto_alg_sem);
655 
656 	if (IS_ERR(larval))
657 		return PTR_ERR(larval);
658 	crypto_remove_final(&algs_to_put);
659 	return 0;
660 }
661 EXPORT_SYMBOL_GPL(crypto_register_instance);
662 
663 void crypto_unregister_instance(struct crypto_instance *inst)
664 {
665 	LIST_HEAD(list);
666 
667 	down_write(&crypto_alg_sem);
668 
669 	crypto_remove_spawns(&inst->alg, &list, NULL);
670 	crypto_remove_instance(inst, &list);
671 
672 	up_write(&crypto_alg_sem);
673 
674 	crypto_remove_final(&list);
675 }
676 EXPORT_SYMBOL_GPL(crypto_unregister_instance);
677 
678 int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
679 		      const char *name, u32 type, u32 mask)
680 {
681 	struct crypto_alg *alg;
682 	int err = -EAGAIN;
683 
684 	if (WARN_ON_ONCE(inst == NULL))
685 		return -EINVAL;
686 
687 	/* Allow the result of crypto_attr_alg_name() to be passed directly */
688 	if (IS_ERR(name))
689 		return PTR_ERR(name);
690 
691 	alg = crypto_find_alg(name, spawn->frontend,
692 			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
693 	if (IS_ERR(alg))
694 		return PTR_ERR(alg);
695 
696 	down_write(&crypto_alg_sem);
697 	if (!crypto_is_moribund(alg)) {
698 		list_add(&spawn->list, &alg->cra_users);
699 		spawn->alg = alg;
700 		spawn->mask = mask;
701 		spawn->next = inst->spawns;
702 		inst->spawns = spawn;
703 		inst->alg.cra_flags |=
704 			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
705 		err = 0;
706 	}
707 	up_write(&crypto_alg_sem);
708 	if (err)
709 		crypto_mod_put(alg);
710 	return err;
711 }
712 EXPORT_SYMBOL_GPL(crypto_grab_spawn);
713 
714 void crypto_drop_spawn(struct crypto_spawn *spawn)
715 {
716 	if (!spawn->alg) /* not yet initialized? */
717 		return;
718 
719 	down_write(&crypto_alg_sem);
720 	if (!spawn->dead)
721 		list_del(&spawn->list);
722 	up_write(&crypto_alg_sem);
723 
724 	if (!spawn->registered)
725 		crypto_mod_put(spawn->alg);
726 }
727 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
728 
729 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
730 {
731 	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
732 	struct crypto_alg *target;
733 	bool shoot = false;
734 
735 	down_read(&crypto_alg_sem);
736 	if (!spawn->dead) {
737 		alg = spawn->alg;
738 		if (!crypto_mod_get(alg)) {
739 			target = crypto_alg_get(alg);
740 			shoot = true;
741 			alg = ERR_PTR(-EAGAIN);
742 		}
743 	}
744 	up_read(&crypto_alg_sem);
745 
746 	if (shoot) {
747 		crypto_shoot_alg(target);
748 		crypto_alg_put(target);
749 	}
750 
751 	return alg;
752 }
753 
754 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
755 				    u32 mask)
756 {
757 	struct crypto_alg *alg;
758 	struct crypto_tfm *tfm;
759 
760 	alg = crypto_spawn_alg(spawn);
761 	if (IS_ERR(alg))
762 		return ERR_CAST(alg);
763 
764 	tfm = ERR_PTR(-EINVAL);
765 	if (unlikely((alg->cra_flags ^ type) & mask))
766 		goto out_put_alg;
767 
768 	tfm = __crypto_alloc_tfm(alg, type, mask);
769 	if (IS_ERR(tfm))
770 		goto out_put_alg;
771 
772 	return tfm;
773 
774 out_put_alg:
775 	crypto_mod_put(alg);
776 	return tfm;
777 }
778 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
779 
780 void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
781 {
782 	struct crypto_alg *alg;
783 	struct crypto_tfm *tfm;
784 
785 	alg = crypto_spawn_alg(spawn);
786 	if (IS_ERR(alg))
787 		return ERR_CAST(alg);
788 
789 	tfm = crypto_create_tfm(alg, spawn->frontend);
790 	if (IS_ERR(tfm))
791 		goto out_put_alg;
792 
793 	return tfm;
794 
795 out_put_alg:
796 	crypto_mod_put(alg);
797 	return tfm;
798 }
799 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
800 
801 int crypto_register_notifier(struct notifier_block *nb)
802 {
803 	return blocking_notifier_chain_register(&crypto_chain, nb);
804 }
805 EXPORT_SYMBOL_GPL(crypto_register_notifier);
806 
807 int crypto_unregister_notifier(struct notifier_block *nb)
808 {
809 	return blocking_notifier_chain_unregister(&crypto_chain, nb);
810 }
811 EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
812 
813 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
814 {
815 	struct rtattr *rta = tb[0];
816 	struct crypto_attr_type *algt;
817 
818 	if (!rta)
819 		return ERR_PTR(-ENOENT);
820 	if (RTA_PAYLOAD(rta) < sizeof(*algt))
821 		return ERR_PTR(-EINVAL);
822 	if (rta->rta_type != CRYPTOA_TYPE)
823 		return ERR_PTR(-EINVAL);
824 
825 	algt = RTA_DATA(rta);
826 
827 	return algt;
828 }
829 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
830 
831 /**
832  * crypto_check_attr_type() - check algorithm type and compute inherited mask
833  * @tb: the template parameters
834  * @type: the algorithm type the template would be instantiated as
835  * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
836  *	      to restrict the flags of any inner algorithms
837  *
838  * Validate that the algorithm type the user requested is compatible with the
839  * one the template would actually be instantiated as.  E.g., if the user is
840  * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
841  * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
842  *
843  * Also compute the mask to use to restrict the flags of any inner algorithms.
844  *
845  * Return: 0 on success; -errno on failure
846  */
847 int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
848 {
849 	struct crypto_attr_type *algt;
850 
851 	algt = crypto_get_attr_type(tb);
852 	if (IS_ERR(algt))
853 		return PTR_ERR(algt);
854 
855 	if ((algt->type ^ type) & algt->mask)
856 		return -EINVAL;
857 
858 	*mask_ret = crypto_algt_inherited_mask(algt);
859 	return 0;
860 }
861 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
862 
863 const char *crypto_attr_alg_name(struct rtattr *rta)
864 {
865 	struct crypto_attr_alg *alga;
866 
867 	if (!rta)
868 		return ERR_PTR(-ENOENT);
869 	if (RTA_PAYLOAD(rta) < sizeof(*alga))
870 		return ERR_PTR(-EINVAL);
871 	if (rta->rta_type != CRYPTOA_ALG)
872 		return ERR_PTR(-EINVAL);
873 
874 	alga = RTA_DATA(rta);
875 	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
876 
877 	return alga->name;
878 }
879 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
880 
881 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
882 			struct crypto_alg *alg)
883 {
884 	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
885 		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
886 		return -ENAMETOOLONG;
887 
888 	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
889 		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
890 		return -ENAMETOOLONG;
891 
892 	return 0;
893 }
894 EXPORT_SYMBOL_GPL(crypto_inst_setname);
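
Putting the pieces together, a hedged skeleton of a template ->create() callback
using the generic helpers defined in this file plus the usual <crypto/algapi.h>
accessors.  Real templates normally go through type-specific wrappers such as
crypto_grab_skcipher(); the example_* names, the context layout, the choice of
CRYPTO_ALG_TYPE_CIPHER and the simplified error handling are all assumptions for
illustration:

	struct example_inst_ctx {
		struct crypto_spawn spawn;
	};

	static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
	{
		struct example_inst_ctx *ctx;
		struct crypto_instance *inst;
		const char *cipher_name;
		u32 mask;
		int err;

		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER, &mask);
		if (err)
			return err;

		cipher_name = crypto_attr_alg_name(tb[1]);
		if (IS_ERR(cipher_name))
			return PTR_ERR(cipher_name);

		inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
		if (!inst)
			return -ENOMEM;
		ctx = crypto_instance_ctx(inst);

		err = crypto_grab_spawn(&ctx->spawn, inst, cipher_name,
					CRYPTO_ALG_TYPE_CIPHER,
					CRYPTO_ALG_TYPE_MASK | mask);
		if (err)
			goto err_free;

		/* e.g. "example(aes)" / "example(aes-generic)" */
		err = crypto_inst_setname(inst, tmpl->name, ctx->spawn.alg);
		if (err)
			goto err_drop;

		/* ... fill in the rest of inst->alg (sizes, ops, ...) here ... */

		err = crypto_register_instance(tmpl, inst);
		if (err)
			goto err_drop;
		return 0;

	err_drop:
		crypto_drop_spawn(&ctx->spawn);
	err_free:
		kfree(inst);
		return err;
	}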
895 
896 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
897 {
898 	INIT_LIST_HEAD(&queue->list);
899 	queue->backlog = &queue->list;
900 	queue->qlen = 0;
901 	queue->max_qlen = max_qlen;
902 }
903 EXPORT_SYMBOL_GPL(crypto_init_queue);
904 
905 int crypto_enqueue_request(struct crypto_queue *queue,
906 			   struct crypto_async_request *request)
907 {
908 	int err = -EINPROGRESS;
909 
910 	if (unlikely(queue->qlen >= queue->max_qlen)) {
911 		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
912 			err = -ENOSPC;
913 			goto out;
914 		}
915 		err = -EBUSY;
916 		if (queue->backlog == &queue->list)
917 			queue->backlog = &request->list;
918 	}
919 
920 	queue->qlen++;
921 	list_add_tail(&request->list, &queue->list);
922 
923 out:
924 	return err;
925 }
926 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
927 
928 void crypto_enqueue_request_head(struct crypto_queue *queue,
929 				 struct crypto_async_request *request)
930 {
931 	if (unlikely(queue->qlen >= queue->max_qlen))
932 		queue->backlog = queue->backlog->prev;
933 
934 	queue->qlen++;
935 	list_add(&request->list, &queue->list);
936 }
937 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
938 
939 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
940 {
941 	struct list_head *request;
942 
943 	if (unlikely(!queue->qlen))
944 		return NULL;
945 
946 	queue->qlen--;
947 
948 	if (queue->backlog != &queue->list)
949 		queue->backlog = queue->backlog->next;
950 
951 	request = queue->list.next;
952 	list_del(request);
953 
954 	return list_entry(request, struct crypto_async_request, list);
955 }
956 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
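
A typical driver-side consumer pairs the enqueue/dequeue calls above with its own
lock and a worker.  A minimal sketch, assuming a hypothetical mydev device whose
queue was set up with crypto_init_queue() at probe time (crypto_get_backlog() and
crypto_request_complete() are the <crypto/algapi.h> helpers):

	struct mydev {
		spinlock_t lock;
		struct crypto_queue queue;
	};

	static int mydev_enqueue(struct mydev *dev, struct crypto_async_request *req)
	{
		unsigned long flags;
		int err;

		spin_lock_irqsave(&dev->lock, flags);
		err = crypto_enqueue_request(&dev->queue, req);
		spin_unlock_irqrestore(&dev->lock, flags);

		/*
		 * -EINPROGRESS: queued normally.
		 * -EBUSY: queue full, request parked on the backlog.
		 * -ENOSPC: queue full and the request may not backlog.
		 */
		return err;
	}

	static void mydev_process_one(struct mydev *dev)
	{
		struct crypto_async_request *req, *backlog;
		unsigned long flags;

		spin_lock_irqsave(&dev->lock, flags);
		backlog = crypto_get_backlog(&dev->queue);
		req = crypto_dequeue_request(&dev->queue);
		spin_unlock_irqrestore(&dev->lock, flags);

		if (backlog)
			crypto_request_complete(backlog, -EINPROGRESS);
		if (!req)
			return;

		/* ... process req, then crypto_request_complete(req, err) ... */
	}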
957 
958 static inline void crypto_inc_byte(u8 *a, unsigned int size)
959 {
960 	u8 *b = (a + size);
961 	u8 c;
962 
963 	for (; size; size--) {
964 		c = *--b + 1;
965 		*b = c;
966 		if (c)
967 			break;
968 	}
969 }
970 
971 void crypto_inc(u8 *a, unsigned int size)
972 {
973 	__be32 *b = (__be32 *)(a + size);
974 	u32 c;
975 
976 	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
977 	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
978 		for (; size >= 4; size -= 4) {
979 			c = be32_to_cpu(*--b) + 1;
980 			*b = cpu_to_be32(c);
981 			if (likely(c))
982 				return;
983 		}
984 
985 	crypto_inc_byte(a, size);
986 }
987 EXPORT_SYMBOL_GPL(crypto_inc);
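
crypto_inc() treats the buffer as one big-endian integer, which is how CTR-style
modes advance their counter block.  A small worked example with assumed values:

	static void ctr_block_example(void)
	{
		u8 ctrblk[16] = {
			0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
			0x08, 0x09, 0x0a, 0x0b, 0xff, 0xff, 0xff, 0xff,
		};

		crypto_inc(ctrblk, sizeof(ctrblk));
		/* The low word wraps and carries into the next one, so the
		 * block now ends in 08 09 0a 0c 00 00 00 00. */
	}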
988 
989 unsigned int crypto_alg_extsize(struct crypto_alg *alg)
990 {
991 	return alg->cra_ctxsize +
992 	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
993 }
994 EXPORT_SYMBOL_GPL(crypto_alg_extsize);
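
A worked example with assumed numbers: crypto_tfm_ctx_alignment() is typically
CRYPTO_MINALIGN, say 8; with cra_ctxsize = 96 and cra_alignmask = 15 the extra
slack is 15 & ~7 = 8, so the extended size is 104 bytes, enough to realign the
context to a 16-byte boundary wherever the 8-byte-aligned allocation starts:

	/* Assumed values: ctxsize 96, alignmask 15, ctx alignment 8. */
	extsize = 96 + (15 & ~(8 - 1));	/* = 96 + 8 = 104 */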
995 
996 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
997 			u32 type, u32 mask)
998 {
999 	int ret = 0;
1000 	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1001 
1002 	if (!IS_ERR(alg)) {
1003 		crypto_mod_put(alg);
1004 		ret = 1;
1005 	}
1006 
1007 	return ret;
1008 }
1009 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1010 
1011 static void __init crypto_start_tests(void)
1012 {
1013 	if (!IS_BUILTIN(CONFIG_CRYPTO_ALGAPI))
1014 		return;
1015 
1016 	if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
1017 		return;
1018 
1019 	for (;;) {
1020 		struct crypto_larval *larval = NULL;
1021 		struct crypto_alg *q;
1022 
1023 		down_write(&crypto_alg_sem);
1024 
1025 		list_for_each_entry(q, &crypto_alg_list, cra_list) {
1026 			struct crypto_larval *l;
1027 
1028 			if (!crypto_is_larval(q))
1029 				continue;
1030 
1031 			l = (void *)q;
1032 
1033 			if (!crypto_is_test_larval(l))
1034 				continue;
1035 
1036 			if (l->test_started)
1037 				continue;
1038 
1039 			l->test_started = true;
1040 			larval = l;
1041 			crypto_schedule_test(larval);
1042 			break;
1043 		}
1044 
1045 		up_write(&crypto_alg_sem);
1046 
1047 		if (!larval)
1048 			break;
1049 	}
1050 
1051 	set_crypto_boot_test_finished();
1052 }
1053 
1054 static int __init crypto_algapi_init(void)
1055 {
1056 	crypto_init_proc();
1057 	crypto_start_tests();
1058 	return 0;
1059 }
1060 
1061 static void __exit crypto_algapi_exit(void)
1062 {
1063 	crypto_exit_proc();
1064 }
1065 
1066 /*
1067  * We run this at late_initcall so that all the built-in algorithms
1068  * have had a chance to register themselves first.
1069  */
1070 late_initcall(crypto_algapi_init);
1071 module_exit(crypto_algapi_exit);
1072 
1073 MODULE_LICENSE("GPL");
1074 MODULE_DESCRIPTION("Cryptographic algorithms API");
1075 MODULE_SOFTDEP("pre: cryptomgr");
1076