// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define MAX_SHASH_ALIGNMASK 63

static const struct crypto_type crypto_shash_type;

static inline struct crypto_istat_hash *shash_get_stat(struct shash_alg *alg)
{
	return hash_get_stat(&alg->halg);
}

static inline int crypto_shash_errstat(struct shash_alg *alg, int err)
{
	return crypto_hash_errstat(&alg->halg, err);
}

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

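	/*
	 * Over-allocate so that an address aligned to (alignmask + 1) is
	 * guaranteed to fall inside the buffer, then copy the key to that
	 * aligned address before handing it to the algorithm's ->setkey().
	 */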
	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

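/*
 * Usage sketch (illustrative only, not part of this file): setting the key
 * of a keyed hash such as "hmac(sha256)".  Error handling is elided.
 *
 *	struct crypto_shash *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_shash(tfm);
 */
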
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_add(len, &shash_get_stat(shash)->hash_tlen);

	if ((unsigned long)data & alignmask)
		err = shash_update_unaligned(desc, data, len);
	else
		err = shash->update(desc, data, len);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&shash_get_stat(shash)->hash_cnt);

	if ((unsigned long)out & alignmask)
		err = shash_final_unaligned(desc, out);
	else
		err = shash->final(desc, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

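/*
 * Usage sketch (illustrative only): incremental hashing of two buffers with
 * a stack-allocated descriptor.  Error handling is elided.
 *
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *
 *	desc->tfm = tfm;
 *	crypto_shash_init(desc);
 *	crypto_shash_update(desc, buf1, len1);
 *	crypto_shash_update(desc, buf2, len2);
 *	crypto_shash_final(desc, digest);
 *	shash_desc_zero(desc);
 */
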
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return shash_update_unaligned(desc, data, len) ?:
	       shash_final_unaligned(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		err = shash_finup_unaligned(desc, data, len, out);
	else
		err = shash->finup(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       shash_update_unaligned(desc, data, len) ?:
	       shash_final_unaligned(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else if (((unsigned long)data | (unsigned long)out) & alignmask)
		err = shash_digest_unaligned(desc, data, len, out);
	else
		err = shash->digest(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);

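/*
 * Usage sketch (illustrative only): one-shot hashing of a linear buffer via
 * the convenience helper above.  Error handling is elided.
 *
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	struct crypto_shash *tfm;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *	crypto_free_shash(tfm);
 */
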
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

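	/*
	 * Walk the request's scatterlist one mapped chunk at a time.  A
	 * negative return value from crypto_shash_update() is fed back
	 * into crypto_hash_walk_done(), which aborts the walk and
	 * propagates the error.
	 */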
	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

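	/*
	 * Fast path: if the whole input sits in the first scatterlist
	 * entry and does not cross a page boundary, hash it directly
	 * from a temporary mapping; otherwise fall back to the generic
	 * init + finup walk.
	 */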
	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

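/*
 * Expose a synchronous shash algorithm through the asynchronous ahash
 * interface: a child shash tfm is kept in the ahash context and every
 * ahash entry point is routed to the matching shash_async_* adapter above.
 */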
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static int __maybe_unused crypto_shash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "shash");
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#ifdef CONFIG_CRYPTO_USER
	.report = crypto_shash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_shash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

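/*
 * Usage sketch (illustrative only): probing for an algorithm before
 * allocating it.
 *
 *	if (crypto_has_shash("sha3-256", 0, 0)) {
 *		struct crypto_shash *tfm = crypto_alloc_shash("sha3-256", 0, 0);
 *		...
 *		crypto_free_shash(tfm);
 *	}
 */
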
int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_istat_hash *istat = hash_get_stat(alg);
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->halg.statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

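/*
 * Registration sketch (illustrative, hypothetical driver code): "foo" and
 * the foo_* symbols below are stand-ins, not part of this file.
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= FOO_DIGEST_SIZE,
 *		.descsize	= sizeof(struct foo_desc_ctx),
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.base		= {
 *			.cra_name		= "foo",
 *			.cra_driver_name	= "foo-generic",
 *			.cra_blocksize		= FOO_BLOCK_SIZE,
 *			.cra_module		= THIS_MODULE,
 *		},
 *	};
 *
 * A driver would call crypto_register_shash(&foo_alg) from its module init
 * function and crypto_unregister_shash(&foo_alg) on exit.
 */
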
void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");