/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

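/*
 * Global list of all registered algorithms, protected by crypto_alg_sem.
 * crypto_chain notifies listeners (such as the crypto manager) of events
 * like algorithm requests.
 */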
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

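/* Take a reference on an algorithm that is known to be alive. */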
static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
	atomic_inc(&alg->cra_refcnt);
	return alg;
}

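/*
 * Take a reference on an algorithm together with its providing module.
 * Returns NULL if the module is already being unloaded.
 */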
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

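/* Release the references taken by crypto_mod_get(). */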
void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

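/*
 * Find the best match for @name with crypto_alg_sem held: an exact match
 * on cra_driver_name wins outright, otherwise the highest-priority entry
 * whose cra_name matches is chosen.  Moribund algorithms and larvals
 * created with a different mask are skipped.  A reference is taken on
 * the result.
 */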
struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);

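/*
 * A larval is a temporary algorithm that stands in for one that is still
 * being loaded or constructed.  Waiters sleep on its completion until it
 * matures into an adult algorithm or is killed.
 */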
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

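/*
 * Allocate a larval for @name and add it to crypto_alg_list, unless a
 * matching entry appeared in the meantime, in which case that entry is
 * returned instead.  The initial refcount of 2 accounts for the list's
 * reference plus the caller's.
 */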
static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	atomic_set(&larval->alg.cra_refcnt, 2);
	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg)
		kfree(larval);

	return alg;
}

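/* Take a larval off the list and wake up everyone waiting on it. */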
void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

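/*
 * Wait up to 60 seconds for a larval to mature, then drop the larval
 * reference.  Returns the adult algorithm with a fresh reference, or an
 * error pointer if none materialised.
 */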
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
	alg = larval->adult;
	if (alg) {
		if (!crypto_mod_get(alg))
			alg = ERR_PTR(-EAGAIN);
	} else
		alg = ERR_PTR(-ENOENT);
	crypto_mod_put(&larval->alg);

	return alg;
}

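/* Lookup with locking: takes crypto_alg_sem around __crypto_alg_lookup(). */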
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}

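/*
 * Look up an algorithm, requesting a module of the same name if nothing
 * is registered yet.  If that still fails, register a larval so that the
 * algorithm can be constructed on the fly.
 */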
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
				      name);
	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_alloc(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

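/*
 * Top-level lookup: if only a larval could be obtained, ask the crypto
 * manager (loading it on first use) to instantiate the algorithm, then
 * wait for the larval to mature.
 */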
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	}

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

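/*
 * Set up the type-specific operations of a new transform.  New-style
 * algorithms supply a crypto_type object; legacy ones are dispatched on
 * their CRYPTO_ALG_TYPE_* flag.
 */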
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_DIGEST:
		if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
		    CRYPTO_ALG_TYPE_HASH_MASK)
			return crypto_init_digest_ops_async(tfm);
		else
			return crypto_init_digest_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

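/* Tear down whatever crypto_init_ops() set up. */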
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (type->exit)
			type->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		crypto_exit_digest_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

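/*
 * Size of the type-specific context, including worst-case padding so the
 * context can be aligned to the algorithm's alignment mask.
 */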
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		len += crypto_digest_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

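/*
 * Mark an algorithm as dying (used when its cra_init() fails with
 * -EAGAIN); crypto_is_moribund() then hides it from further lookups.
 */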
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

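/*
 * Allocate and initialise a transform for @alg.  The caller must hold a
 * reference on @alg, which is inherited by the transform on success.
 */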
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (alg->cra_init && (err = alg->cra_init(tfm))) {
		if (err == -EAGAIN)
			crypto_shoot_alg(alg);
		goto cra_init_failed;
	}

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

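/*
 * Typical pairing with crypto_free_tfm() below (a minimal sketch; "sha1"
 * is only an illustrative name, and a type/mask of 0 accepts any
 * implementation):
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("sha1", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */
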
/*
 *	crypto_free_tfm - Free crypto transform
 *	@tfm: Transform to free
 *
 *	crypto_free_tfm() frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_free_tfm(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;
	int size;

	if (unlikely(!tfm))
		return;

	alg = tfm->__crt_alg;
	size = sizeof(*tfm) + alg->cra_ctxsize;

	if (alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	memset(tfm, 0, size);
	kfree(tfm);
}
EXPORT_SYMBOL_GPL(crypto_free_tfm);

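/*
 * Returns 1 if an algorithm matching @name, @type and @mask can be
 * found or constructed, 0 otherwise.
 */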
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");