// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#ifndef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

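/*
 * Take a reference on an algorithm and on the module that provides it.
 * Returns NULL if the owning module is already being unloaded, so a
 * successful crypto_mod_get() must be balanced by crypto_mod_put().
 */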
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

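/*
 * Look up an algorithm on crypto_alg_list; the caller must hold
 * crypto_alg_sem.  An exact match on cra_driver_name wins outright;
 * otherwise the highest-priority entry whose cra_name matches is
 * returned, with a reference held via crypto_mod_get().
 */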
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

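/*
 * A larval is a temporary placeholder that stands in for an algorithm
 * while it is being loaded, constructed or tested.  Lookups that race
 * with registration block on the larval's completion until the "adult"
 * algorithm is ready (or the larval dies).
 */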
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

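/*
 * Insert a larval for @name unless somebody else registered an entry
 * first; in that case the freshly allocated larval is discarded and
 * the existing algorithm (waited on, if it is itself a larval) is
 * returned instead.
 */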
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

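/*
 * Kick the crypto manager to self-test the newly registered algorithm
 * behind @larval, then sleep until the test outcome is signalled.  The
 * larval is killed on the way out regardless of the result.
 */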
void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

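/*
 * Start a deferred boot-time self-test at most once.  The test_started
 * flag is re-checked under crypto_alg_sem so that concurrent waiters
 * cannot both claim the test.
 */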
static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

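/*
 * Wait (killably, with a 60 second timeout) for a larval to turn into
 * a real algorithm.  On success a reference to the adult algorithm is
 * returned and the larval reference is dropped; otherwise an ERR_PTR
 * is returned, with -EAGAIN meaning the caller may retry the lookup.
 */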
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

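/*
 * Lookup wrapper that handles the CRYPTO_ALG_TESTED and
 * CRYPTO_ALG_FIPS_INTERNAL bits: untested algorithms are filtered out
 * unless the caller asked for them, FIPS-internal implementations are
 * rejected in FIPS mode, and an algorithm whose self-test failed
 * yields -ELIBBAD.
 */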
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

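/*
 * Core of name resolution: look the algorithm up, and if it is absent
 * try to load a module via the "crypto-<name>" alias (plus the
 * "crypto-<name>-all" alias, unless the caller insists on an
 * implementation that needs no fallback) before looking again.  If it
 * is still missing, register a request larval so the crypto manager
 * can try to instantiate it.
 */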
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

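/*
 * Notify the chain, loading the cryptomgr module on demand if nobody
 * was listening the first time round.
 */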
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

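/*
 * Top-level lookup used by tfm allocation: resolve @name, and if that
 * only produced a request larval, ask the crypto manager (e.g. to
 * instantiate a template) and wait for the result.
 */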
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

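/*
 * Size of the per-tfm context, including padding so that the context
 * can be aligned to cra_alignmask.  Algorithm types with a cra_type
 * object supply their own ctxsize(); only the legacy cipher and
 * compression types are handled here directly.
 */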
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

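/*
 * Mark an algorithm as dying after an instantiation failure so that it
 * is skipped by subsequent lookups (crypto_is_moribund()) and a retry
 * can pick a different implementation.
 */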
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

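/*
 * Allocate and initialise a transform for @alg via the legacy
 * (non-frontend) path.  The caller must already hold a reference on
 * @alg; on failure an ERR_PTR is returned and, for -EAGAIN, the
 * algorithm is shot down so a retry can make progress.
 */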
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

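/*
 * Frontend-aware counterpart of __crypto_alloc_tfm(): the frontend's
 * private data is placed ahead of the struct crypto_tfm, which is why
 * the returned pointer is the start of the allocation rather than the
 * tfm itself.
 */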
void *crypto_create_tfm_node(struct crypto_alg *alg,
			const struct crypto_type *frontend,
			int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node on which to place the transform and its requests;
 *		NUMA_NO_NODE means the caller has no special requirement.
 *
 *	crypto_alloc_tfm_node() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

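/*
 * Completion callback for use with crypto_wait_req().  A typical
 * synchronous wait over the async API looks roughly like this
 * (sketch only, error handling elided):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 * -EINPROGRESS only signals that a backlogged request has moved to
 * the hardware queue, so it must not complete the wait.
 */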
void crypto_req_done(void *data, int err)
{
	struct crypto_wait *wait = data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");