xref: /linux/crypto/algapi.c (revision 37744feebc086908fd89760650f458ab19071750)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
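
/*
 * Note: the "cra_alignmask & (cra_alignmask + 1)" test above accepts
 * exactly the values of the form 2^n - 1, i.e. valid alignment masks.
 * For example, an alignmask of 0x07 (0b0111) passes since
 * 0x07 & 0x08 == 0, while 0x06 (0b0110) is rejected since
 * 0x06 & 0x07 == 0x06.
 */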

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that nalg depends on.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here,
			 * since an instance's spawns are set up prior to
			 * the instance being registered (and that
			 * registration may have failed, so spawn->registered
			 * being true does not guarantee that the instance
			 * itself is on the algorithm list).  An unregistered
			 * instance will have NULL ->cra_users.next, since
			 * ->cra_users isn't properly initialized until
			 * registration.  But an unregistered instance cannot
			 * have any users, so treat it the same as
			 * ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
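
/*
 * Illustrative scenario (algorithm names chosen for illustration): if
 * "aes-generic" is being torn down, the walk above visits instances
 * built on top of it, such as "cbc(aes)" or "ctr(aes)", marks their
 * spawns dead and queues the instances on *list for unregistration.
 * If the replacement algorithm nalg itself depends on one of those
 * instances, that dependency chain is resurrected instead.
 */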

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
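
/*
 * Illustrative usage sketch (all names and values invented): a driver
 * module typically registers a statically defined algorithm at module
 * init and unregisters it on exit:
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name		= "myalg",
 *		.cra_driver_name	= "myalg-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init my_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 */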

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);
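
/*
 * Illustrative sketch (names invented): a template module registers a
 * struct crypto_template whose ->create() callback builds instances:
 *
 *	static struct crypto_template my_tmpl = {
 *		.name	= "mywrap",
 *		.create	= my_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 * After crypto_register_template(&my_tmpl), a lookup of "mywrap(aes)"
 * can reach my_create() via crypto_lookup_template() below.
 */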

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
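
/*
 * Note: the "crypto-%s" pattern above lets a missing template be
 * auto-loaded when its module declares a matching alias, e.g.
 * MODULE_ALIAS_CRYPTO("hmac"), which provides a "crypto-hmac" alias
 * alongside the plain name.
 */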

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
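
/*
 * Illustrative sketch (variable names invented): in a template's
 * ->create() callback, a spawn is typically grabbed from a name parsed
 * out of the instance's attributes:
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *
 *	err = crypto_grab_spawn(spawn, inst,
 *				crypto_attr_alg_name(tb[1]), 0, 0);
 *
 * Passing crypto_attr_alg_name()'s return value straight through works
 * because crypto_grab_spawn() accepts ERR_PTR names, as noted above.
 */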

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	if (!spawn->dead && !crypto_mod_get(alg)) {
		alg->cra_flags |= CRYPTO_ALG_DYING;
		alg = NULL;
	}
	up_read(&crypto_alg_sem);

	return alg ?: ERR_PTR(-EAGAIN);
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
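
/*
 * Example of the resulting names: wrapping the generic "sha256"
 * implementation (driver name "sha256-generic") in the "hmac" template
 * yields cra_name "hmac(sha256)" and cra_driver_name
 * "hmac(sha256-generic)".
 */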

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
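
/*
 * Illustrative driver-side sketch (names invented): a hardware driver
 * typically pairs these helpers, treating -EINPROGRESS and -EBUSY from
 * crypto_enqueue_request() as "accepted" and "accepted but backlogged"
 * respectively:
 *
 *	crypto_init_queue(&drv->queue, 64);
 *
 *	spin_lock_bh(&drv->lock);
 *	err = crypto_enqueue_request(&drv->queue, &req->base);
 *	spin_unlock_bh(&drv->lock);
 *
 *	... later, from the driver's processing worker ...
 *	areq = crypto_dequeue_request(&drv->queue);
 */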

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
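
/*
 * crypto_inc() treats the buffer as a single big-endian integer and
 * adds one with carry, which is how CTR-style modes step their counter
 * block.  For example, a 4-byte buffer { 0x00, 0x00, 0x00, 0xff }
 * becomes { 0x00, 0x00, 0x01, 0x00 }.
 */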

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^  *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
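
/*
 * Note: most callers reach this through the crypto_xor() and
 * crypto_xor_cpy() wrappers in <crypto/algapi.h>, which can inline the
 * constant-size word-aligned case and otherwise fall back to the loops
 * above.
 */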

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
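
/*
 * Worked example: with cra_ctxsize = 64, cra_alignmask = 15 and a
 * crypto_tfm_ctx_alignment() of 8, the extra term is 15 & ~7 = 8, so
 * 72 bytes are reserved, enough to realign the context to 16 bytes at
 * runtime.
 */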

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");