xref: /linux/crypto/algapi.c (revision f144367d012929326f15a399394a9a8be4f98acb)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Cryptographic API for algorithms (i.e., low-level API).
4  *
5  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6  */
7 
8 #include <crypto/algapi.h>
9 #include <linux/err.h>
10 #include <linux/errno.h>
11 #include <linux/fips.h>
12 #include <linux/init.h>
13 #include <linux/kernel.h>
14 #include <linux/list.h>
15 #include <linux/module.h>
16 #include <linux/rtnetlink.h>
17 #include <linux/slab.h>
18 #include <linux/string.h>
19 #include <linux/workqueue.h>
20 
21 #include "internal.h"
22 
23 static LIST_HEAD(crypto_template_list);
24 
25 static inline void crypto_check_module_sig(struct module *mod)
26 {
27 	if (fips_enabled && mod && !module_sig_ok(mod))
28 		panic("Module %s signature verification failed in FIPS mode\n",
29 		      module_name(mod));
30 }
31 
32 static int crypto_check_alg(struct crypto_alg *alg)
33 {
34 	crypto_check_module_sig(alg->cra_module);
35 
36 	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
37 		return -EINVAL;
38 
39 	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
40 		return -EINVAL;
41 
42 	/* General maximums for all algs. */
43 	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
44 		return -EINVAL;
45 
46 	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
47 		return -EINVAL;
48 
49 	/* Lower maximums for specific alg types. */
50 	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
51 			       CRYPTO_ALG_TYPE_CIPHER) {
52 		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
53 			return -EINVAL;
54 
55 		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
56 			return -EINVAL;
57 	}
58 
59 	if (alg->cra_priority < 0)
60 		return -EINVAL;
61 
62 	refcount_set(&alg->cra_refcnt, 1);
63 
64 	return 0;
65 }
66 
/* Free a template instance via its type-specific destructor. */
static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}
71 
/*
 * Deferred destruction of template instances.  crypto_destroy_instance()
 * sets an instance's refcount to the marker value -1 and schedules this
 * work; here we collect all such instances from the template's ->dead
 * list under crypto_alg_sem and free them after dropping the lock.
 */
static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_template *tmpl = container_of(w, struct crypto_template,
						    free_work);
	struct crypto_instance *inst;
	struct hlist_node *n;
	HLIST_HEAD(list);

	down_write(&crypto_alg_sem);
	hlist_for_each_entry_safe(inst, n, &tmpl->dead, list) {
		/* Skip instances whose refcount has not hit the -1 marker. */
		if (refcount_read(&inst->alg.cra_refcnt) != -1)
			continue;
		hlist_del(&inst->list);
		hlist_add_head(&inst->list, &list);
	}
	up_write(&crypto_alg_sem);

	/* Free the collected instances outside the semaphore. */
	hlist_for_each_entry_safe(inst, n, &list, list)
		crypto_free_instance(inst);
}
92 
/*
 * cra_destroy callback for template instances: mark the refcount with the
 * special value -1 (checked by crypto_destroy_instance_workfn()) and defer
 * the actual freeing to the owning template's work item.
 */
static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);
	struct crypto_template *tmpl = inst->tmpl;

	refcount_set(&alg->cra_refcnt, -1);
	schedule_work(&tmpl->free_work);
}
103 
104 /*
105  * This function adds a spawn to the list secondary_spawns which
106  * will be used at the end of crypto_remove_spawns to unregister
107  * instances, unless the spawn happens to be one that is depended
108  * on by the new algorithm (nalg in crypto_remove_spawns).
109  *
110  * This function is also responsible for resurrecting any algorithms
111  * in the dependency chain of nalg by unsetting n->dead.
112  */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	/* Pop the most recently pushed spawn off the DFS stack. */
	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	/*
	 * NOTE: n may alias the list head here when spawn was the only
	 * entry; the list_is_last() test below relies on that to detect
	 * an emptied stack.
	 */
	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	/* A surviving spawn resurrects the instance it belongs to. */
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}
136 
/*
 * Mark a template instance dead, unlink it from the algorithm list and
 * park it on its template's ->dead list, dropping the registration
 * reference.  Called with crypto_alg_sem held for write (see
 * crypto_unregister_instance() and crypto_remove_spawns()).
 */
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	/* Already torn down by an earlier pass. */
	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl)
		return;

	list_del_init(&inst->alg.cra_list);
	hlist_del(&inst->list);
	hlist_add_head(&inst->list, &tmpl->dead);

	/* All dependent spawns must already have been removed. */
	BUG_ON(!list_empty(&inst->alg.cra_users));

	crypto_alg_put(&inst->alg);
}
158 
159 /*
160  * Given an algorithm alg, remove all algorithms that depend on it
161  * through spawns.  If nalg is not null, then exempt any algorithms
162  * that is depended on by nalg.  This is useful when nalg itself
163  * depends on alg.
164  */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	/*
	 * Collect the direct users of alg whose type requirements are
	 * still met by new_type; they form the first level of the walk.
	 */
	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			/* Only a registered spawn of nalg itself survives. */
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			/* Descend into the users of this instance. */
			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
244 
245 static void crypto_alg_finish_registration(struct crypto_alg *alg,
246 					   struct list_head *algs_to_put)
247 {
248 	struct crypto_alg *q;
249 
250 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
251 		if (q == alg)
252 			continue;
253 
254 		if (crypto_is_moribund(q))
255 			continue;
256 
257 		if (crypto_is_larval(q))
258 			continue;
259 
260 		if (strcmp(alg->cra_name, q->cra_name))
261 			continue;
262 
263 		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
264 		    q->cra_priority > alg->cra_priority)
265 			continue;
266 
267 		crypto_remove_spawns(q, algs_to_put, alg);
268 	}
269 
270 	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
271 }
272 
/*
 * Allocate the test larval that tracks alg while its self-test runs.
 *
 * Returns NULL when no self-test is needed (selftests disabled or the
 * algorithm is internal-only), an ERR_PTR on failure, or a larval that
 * holds a module reference on alg and mirrors its driver name/priority.
 */
static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS) ||
	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
		return NULL; /* No self-test needed */

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	/* Pin the adult algorithm for the lifetime of the larval. */
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}
299 
/*
 * Insert alg into crypto_alg_list, rejecting duplicates and name clashes.
 * Called with crypto_alg_sem held for write (see crypto_register_alg()
 * and crypto_register_instance()).
 *
 * Returns NULL when the algorithm was registered and needs no self-test,
 * a test larval when a self-test is pending, or an ERR_PTR on failure.
 */
static struct crypto_larval *
__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	ret = -EEXIST;

	/* Reject duplicate registrations and any name collisions. */
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		/* Names and driver names must not shadow each other. */
		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval) {
		/* No cheating! */
		alg->cra_flags &= ~CRYPTO_ALG_TESTED;

		list_add(&larval->alg.cra_list, &crypto_alg_list);
	} else {
		/* No self-test: the algorithm is usable immediately. */
		alg->cra_flags |= CRYPTO_ALG_TESTED;
		crypto_alg_finish_registration(alg, algs_to_put);
	}

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}
356 
/*
 * Record the outcome of an algorithm's self-test.  Locates the test
 * larval by driver name, kills it, and on success marks the adult
 * algorithm tested and finishes its registration.
 */
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	up_write(&crypto_alg_sem);
	return;

found:
	/* The larval has served its purpose; retire it. */
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (crypto_is_dead(alg))
		goto complete;

	/* -ECANCELED flags the algorithm as FIPS-internal-only. */
	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_alg_finish_registration(alg, &list);

complete:
	list_del_init(&test->alg.cra_list);
	complete_all(&test->completion);

	up_write(&crypto_alg_sem);

	crypto_alg_put(&test->alg);
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
407 
408 void crypto_remove_final(struct list_head *list)
409 {
410 	struct crypto_alg *alg;
411 	struct crypto_alg *n;
412 
413 	list_for_each_entry_safe(alg, n, list, cra_list) {
414 		list_del_init(&alg->cra_list);
415 		crypto_alg_put(alg);
416 	}
417 }
418 EXPORT_SYMBOL_GPL(crypto_remove_final);
419 
/*
 * cra_destroy callback for algorithms duplicated by crypto_register_alg()
 * (CRYPTO_ALG_DUP_FIRST): the duplicated allocation begins algsize bytes
 * before the crypto_alg itself, so step back to the true allocation start
 * before freeing.
 */
static void crypto_free_alg(struct crypto_alg *alg)
{
	unsigned int algsize = alg->cra_type->algsize;
	u8 *p = (u8 *)alg - algsize;

	crypto_destroy_alg(alg);
	kfree(p);
}
428 
/*
 * Register a new algorithm.  Validates it, optionally duplicates the
 * containing object (CRYPTO_ALG_DUP_FIRST), inserts it into the global
 * list and kicks off its self-test when boot-time testing has finished.
 *
 * Returns 0 on success or a negative errno.
 */
int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	bool test_started = false;
	LIST_HEAD(algs_to_put);
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	/*
	 * DUP_FIRST: register a kmemdup'd copy of the enclosing object
	 * (which starts algsize bytes before alg) so the caller's copy
	 * stays untouched; the duplicate frees itself via crypto_free_alg.
	 */
	if (alg->cra_flags & CRYPTO_ALG_DUP_FIRST &&
	    !WARN_ON_ONCE(alg->cra_destroy)) {
		unsigned int algsize = alg->cra_type->algsize;
		u8 *p = (u8 *)alg - algsize;

		p = kmemdup(p, algsize + sizeof(*alg), GFP_KERNEL);
		if (!p)
			return -ENOMEM;

		alg = (void *)(p + algsize);
		alg->cra_destroy = crypto_free_alg;
	}

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg, &algs_to_put);
	if (!IS_ERR_OR_NULL(larval)) {
		test_started = crypto_boot_test_finished();
		larval->test_started = test_started;
	}
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval)) {
		crypto_alg_put(alg);
		return PTR_ERR(larval);
	}

	if (test_started)
		crypto_schedule_test(larval);
	else
		crypto_remove_final(&algs_to_put);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
475 
/*
 * Mark alg dead, unlink it and tear down its dependent spawns (queued on
 * list for later release).  Called with crypto_alg_sem held for write.
 * Returns -ENOENT if alg was not on the algorithm list.
 */
static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}
488 
/*
 * Unregister an algorithm previously added with crypto_register_alg().
 * Warns (and returns) if the algorithm is not registered, or if it has
 * outstanding references and no cra_destroy to reap them.
 */
void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	WARN_ON(!alg->cra_destroy && refcount_read(&alg->cra_refcnt) != 1);

	/* Queue alg itself so crypto_remove_final() drops its reference. */
	list_add(&alg->cra_list, &list);
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
507 
508 int crypto_register_algs(struct crypto_alg *algs, int count)
509 {
510 	int i, ret;
511 
512 	for (i = 0; i < count; i++) {
513 		ret = crypto_register_alg(&algs[i]);
514 		if (ret) {
515 			crypto_unregister_algs(algs, i);
516 			return ret;
517 		}
518 	}
519 
520 	return 0;
521 }
522 EXPORT_SYMBOL_GPL(crypto_register_algs);
523 
524 void crypto_unregister_algs(struct crypto_alg *algs, int count)
525 {
526 	int i;
527 
528 	for (i = count - 1; i >= 0; --i)
529 		crypto_unregister_alg(&algs[i]);
530 }
531 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
532 
533 int crypto_register_template(struct crypto_template *tmpl)
534 {
535 	struct crypto_template *q;
536 	int err = -EEXIST;
537 
538 	INIT_WORK(&tmpl->free_work, crypto_destroy_instance_workfn);
539 
540 	down_write(&crypto_alg_sem);
541 
542 	crypto_check_module_sig(tmpl->module);
543 
544 	list_for_each_entry(q, &crypto_template_list, list) {
545 		if (q == tmpl)
546 			goto out;
547 	}
548 
549 	list_add(&tmpl->list, &crypto_template_list);
550 	err = 0;
551 out:
552 	up_write(&crypto_alg_sem);
553 	return err;
554 }
555 EXPORT_SYMBOL_GPL(crypto_register_template);
556 
557 int crypto_register_templates(struct crypto_template *tmpls, int count)
558 {
559 	int i, err;
560 
561 	for (i = 0; i < count; i++) {
562 		err = crypto_register_template(&tmpls[i]);
563 		if (err)
564 			goto out;
565 	}
566 	return 0;
567 
568 out:
569 	for (--i; i >= 0; --i)
570 		crypto_unregister_template(&tmpls[i]);
571 	return err;
572 }
573 EXPORT_SYMBOL_GPL(crypto_register_templates);
574 
/*
 * Remove a template and destroy all of its instances.  BUGs if the
 * template is not registered or if any instance still has users; waits
 * for any deferred instance destruction to finish before returning.
 */
void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	/* Kill every instance spawned from this template. */
	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	/* Each instance must be down to its registration reference. */
	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);

	/* Drain any pending deferred destruction for this template. */
	flush_work(&tmpl->free_work);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);
605 
606 void crypto_unregister_templates(struct crypto_template *tmpls, int count)
607 {
608 	int i;
609 
610 	for (i = count - 1; i >= 0; --i)
611 		crypto_unregister_template(&tmpls[i]);
612 }
613 EXPORT_SYMBOL_GPL(crypto_unregister_templates);
614 
615 static struct crypto_template *__crypto_lookup_template(const char *name)
616 {
617 	struct crypto_template *q, *tmpl = NULL;
618 
619 	down_read(&crypto_alg_sem);
620 	list_for_each_entry(q, &crypto_template_list, list) {
621 		if (strcmp(q->name, name))
622 			continue;
623 		if (unlikely(!crypto_tmpl_get(q)))
624 			continue;
625 
626 		tmpl = q;
627 		break;
628 	}
629 	up_read(&crypto_alg_sem);
630 
631 	return tmpl;
632 }
633 
/*
 * Look up a template by name, requesting the "crypto-<name>" module and
 * retrying once if the template is not already registered.
 */
struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
640 
/*
 * Register a template instance: bind it to tmpl, finalize its spawns,
 * propagate inherited FIPS-internal flags and insert it into the
 * algorithm list.  Returns 0 on success or a negative errno.
 */
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	LIST_HEAD(algs_to_put);
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
	inst->alg.cra_destroy = crypto_destroy_instance;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	/* Walk the spawn chain set up by crypto_grab_spawn(). */
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		/* An underlying algorithm went away before we registered. */
		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		/* Drop the module ref taken at grab time; registration
		 * now keeps the algorithm alive via cra_users. */
		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	/* FIPS-internal propagates from any underlying algorithm. */
	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	if (larval)
		crypto_schedule_test(larval);
	else
		crypto_remove_final(&algs_to_put);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);
703 
/*
 * Unregister a template instance: tear down everything that depends on
 * it, then remove the instance itself and release the collected
 * references outside the semaphore.
 */
void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
718 
/*
 * Initialize a spawn: look up the named algorithm, link the spawn onto
 * its cra_users list and onto inst's spawn chain, and propagate the
 * algorithm's inheritable flags to the instance.
 *
 * Returns 0 on success, -EAGAIN if the algorithm is moribund, or the
 * lookup error.  On success the spawn holds a module reference on the
 * algorithm (released at registration or by crypto_drop_spawn()).
 */
int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	/* Lookup reference is only kept on success. */
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
754 
/*
 * Release a spawn set up by crypto_grab_spawn().  The module reference
 * is dropped only when the spawn never reached registration; after
 * registration that reference was already released by
 * crypto_register_instance().
 */
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	/* A dead spawn was already unlinked by crypto_remove_spawns(). */
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);
769 
/*
 * Get a usable (module-pinned) algorithm reference from a spawn, or
 * ERR_PTR(-EAGAIN) if the spawn is dead or the algorithm's module is
 * going away.  In the latter case crypto_shoot_alg() is invoked on the
 * algorithm outside the semaphore (defined elsewhere; presumably marks
 * it unusable — see internal.h).
 */
static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			/* Module gone: remember alg so we can shoot it
			 * after dropping the lock. */
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}
794 
795 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
796 				    u32 mask)
797 {
798 	struct crypto_alg *alg;
799 	struct crypto_tfm *tfm;
800 
801 	alg = crypto_spawn_alg(spawn);
802 	if (IS_ERR(alg))
803 		return ERR_CAST(alg);
804 
805 	tfm = ERR_PTR(-EINVAL);
806 	if (unlikely((alg->cra_flags ^ type) & mask))
807 		goto out_put_alg;
808 
809 	tfm = __crypto_alloc_tfm(alg, type, mask);
810 	if (IS_ERR(tfm))
811 		goto out_put_alg;
812 
813 	return tfm;
814 
815 out_put_alg:
816 	crypto_mod_put(alg);
817 	return tfm;
818 }
819 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
820 
821 void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
822 {
823 	struct crypto_alg *alg;
824 	struct crypto_tfm *tfm;
825 
826 	alg = crypto_spawn_alg(spawn);
827 	if (IS_ERR(alg))
828 		return ERR_CAST(alg);
829 
830 	tfm = crypto_create_tfm(alg, spawn->frontend);
831 	if (IS_ERR(tfm))
832 		goto out_put_alg;
833 
834 	return tfm;
835 
836 out_put_alg:
837 	crypto_mod_put(alg);
838 	return tfm;
839 }
840 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
841 
/* Subscribe to crypto events (see CRYPTO_MSG_* notifications). */
int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);
847 
/* Unsubscribe from crypto events. */
int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
853 
854 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
855 {
856 	struct rtattr *rta = tb[0];
857 	struct crypto_attr_type *algt;
858 
859 	if (!rta)
860 		return ERR_PTR(-ENOENT);
861 	if (RTA_PAYLOAD(rta) < sizeof(*algt))
862 		return ERR_PTR(-EINVAL);
863 	if (rta->rta_type != CRYPTOA_TYPE)
864 		return ERR_PTR(-EINVAL);
865 
866 	algt = RTA_DATA(rta);
867 
868 	return algt;
869 }
870 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
871 
872 /**
873  * crypto_check_attr_type() - check algorithm type and compute inherited mask
874  * @tb: the template parameters
875  * @type: the algorithm type the template would be instantiated as
876  * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
877  *	      to restrict the flags of any inner algorithms
878  *
879  * Validate that the algorithm type the user requested is compatible with the
880  * one the template would actually be instantiated as.  E.g., if the user is
881  * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
882  * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
883  *
884  * Also compute the mask to use to restrict the flags of any inner algorithms.
885  *
886  * Return: 0 on success; -errno on failure
887  */
888 int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
889 {
890 	struct crypto_attr_type *algt;
891 
892 	algt = crypto_get_attr_type(tb);
893 	if (IS_ERR(algt))
894 		return PTR_ERR(algt);
895 
896 	if ((algt->type ^ type) & algt->mask)
897 		return -EINVAL;
898 
899 	*mask_ret = crypto_algt_inherited_mask(algt);
900 	return 0;
901 }
902 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
903 
904 const char *crypto_attr_alg_name(struct rtattr *rta)
905 {
906 	struct crypto_attr_alg *alga;
907 
908 	if (!rta)
909 		return ERR_PTR(-ENOENT);
910 	if (RTA_PAYLOAD(rta) < sizeof(*alga))
911 		return ERR_PTR(-EINVAL);
912 	if (rta->rta_type != CRYPTOA_ALG)
913 		return ERR_PTR(-EINVAL);
914 
915 	alga = RTA_DATA(rta);
916 	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
917 
918 	return alga->name;
919 }
920 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
921 
922 int __crypto_inst_setname(struct crypto_instance *inst, const char *name,
923 			  const char *driver, struct crypto_alg *alg)
924 {
925 	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
926 		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
927 		return -ENAMETOOLONG;
928 
929 	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
930 		     driver, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
931 		return -ENAMETOOLONG;
932 
933 	return 0;
934 }
935 EXPORT_SYMBOL_GPL(__crypto_inst_setname);
936 
937 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
938 {
939 	INIT_LIST_HEAD(&queue->list);
940 	queue->backlog = &queue->list;
941 	queue->qlen = 0;
942 	queue->max_qlen = max_qlen;
943 }
944 EXPORT_SYMBOL_GPL(crypto_init_queue);
945 
946 int crypto_enqueue_request(struct crypto_queue *queue,
947 			   struct crypto_async_request *request)
948 {
949 	int err = -EINPROGRESS;
950 
951 	if (unlikely(queue->qlen >= queue->max_qlen)) {
952 		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
953 			err = -ENOSPC;
954 			goto out;
955 		}
956 		err = -EBUSY;
957 		if (queue->backlog == &queue->list)
958 			queue->backlog = &request->list;
959 	}
960 
961 	queue->qlen++;
962 	list_add_tail(&request->list, &queue->list);
963 
964 out:
965 	return err;
966 }
967 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
968 
969 void crypto_enqueue_request_head(struct crypto_queue *queue,
970 				 struct crypto_async_request *request)
971 {
972 	if (unlikely(queue->qlen >= queue->max_qlen))
973 		queue->backlog = queue->backlog->prev;
974 
975 	queue->qlen++;
976 	list_add(&request->list, &queue->list);
977 }
978 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
979 
980 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
981 {
982 	struct list_head *request;
983 
984 	if (unlikely(!queue->qlen))
985 		return NULL;
986 
987 	queue->qlen--;
988 
989 	if (queue->backlog != &queue->list)
990 		queue->backlog = queue->backlog->next;
991 
992 	request = queue->list.next;
993 	list_del_init(request);
994 
995 	return list_entry(request, struct crypto_async_request, list);
996 }
997 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
998 
999 static inline void crypto_inc_byte(u8 *a, unsigned int size)
1000 {
1001 	u8 *b = (a + size);
1002 	u8 c;
1003 
1004 	for (; size; size--) {
1005 		c = *--b + 1;
1006 		*b = c;
1007 		if (c)
1008 			break;
1009 	}
1010 }
1011 
/*
 * Increment a size-byte big-endian counter at a by one.  Processes
 * 32-bit words from the end of the buffer when the pointer is suitably
 * aligned (or unaligned access is cheap on this arch), falling back to
 * crypto_inc_byte() for the remainder.
 */
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			/* Walk backwards one word at a time; stop once
			 * a word does not wrap to zero. */
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
1029 
/*
 * Default tfm context size: the algorithm's context plus any extra
 * padding needed to honour cra_alignmask beyond the base context
 * alignment.
 */
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1036 
1037 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1038 			u32 type, u32 mask)
1039 {
1040 	int ret = 0;
1041 	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1042 
1043 	if (!IS_ERR(alg)) {
1044 		crypto_mod_put(alg);
1045 		ret = 1;
1046 	}
1047 
1048 	return ret;
1049 }
1050 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1051 
/*
 * Kick off the self-tests of all algorithms registered before boot-test
 * completion.  Repeatedly picks one unstarted test larval under the
 * semaphore and schedules its test outside it, until none remain.
 */
static void __init crypto_start_tests(void)
{
	if (!IS_BUILTIN(CONFIG_CRYPTO_ALGAPI))
		return;

	if (!IS_ENABLED(CONFIG_CRYPTO_SELFTESTS))
		return;

	/* From now on, new registrations schedule their own tests. */
	set_crypto_boot_test_finished();

	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			/* Claim this larval while still under the lock. */
			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_schedule_test(larval);
	}
}
1095 
/* Module init: set up /proc reporting and start pending self-tests. */
static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}
1102 
/* Module exit: tear down /proc reporting. */
static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}
1107 
1108 /*
1109  * We run this at late_initcall so that all the built-in algorithms
1110  * have had a chance to register themselves first.
1111  */
1112 late_initcall(crypto_algapi_init);
1113 module_exit(crypto_algapi_exit);
1114 
1115 MODULE_LICENSE("GPL");
1116 MODULE_DESCRIPTION("Cryptographic algorithms API");
1117 MODULE_SOFTDEP("pre: cryptomgr");
1118