xref: /linux/crypto/ahash.c (revision 4f95a6d2748acffaf866cc58e51d2fd00227e91b)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Asynchronous Cryptographic Hash operations.
4  *
5  * This is the implementation of the ahash (asynchronous hash) API.  It differs
6  * from shash (synchronous hash) in that ahash supports asynchronous operations,
7  * and it hashes data from scatterlists instead of virtually addressed buffers.
8  *
9  * The ahash API provides access to both ahash and shash algorithms.  The shash
10  * API only provides access to shash algorithms.
11  *
12  * Copyright (c) 2008 Loc Ho <lho@amcc.com>
13  */
14 
15 #include <crypto/scatterwalk.h>
16 #include <linux/cryptouser.h>
17 #include <linux/err.h>
18 #include <linux/kernel.h>
19 #include <linux/module.h>
20 #include <linux/sched.h>
21 #include <linux/slab.h>
22 #include <linux/seq_file.h>
23 #include <linux/string.h>
24 #include <linux/string_choices.h>
25 #include <net/netlink.h>
26 
27 #include "hash.h"
28 
29 #define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
30 
/*
 * State for walking an ahash request's source scatterlist one mapped
 * page fragment at a time.
 */
struct crypto_hash_walk {
	char *data;		/* mapped address of the current fragment */

	unsigned int offset;	/* in-page offset of the current fragment */
	unsigned int flags;	/* request flags, consulted by crypto_yield() */

	struct page *pg;	/* page currently mapped */
	unsigned int entrylen;	/* bytes left in the current sg entry */

	unsigned int total;	/* bytes left in subsequent sg entries */
	struct scatterlist *sg;	/* current scatterlist entry */
};
43 
44 static int hash_walk_next(struct crypto_hash_walk *walk)
45 {
46 	unsigned int offset = walk->offset;
47 	unsigned int nbytes = min(walk->entrylen,
48 				  ((unsigned int)(PAGE_SIZE)) - offset);
49 
50 	walk->data = kmap_local_page(walk->pg);
51 	walk->data += offset;
52 	walk->entrylen -= nbytes;
53 	return nbytes;
54 }
55 
/*
 * Load the walk with the next scatterlist entry, clamp it to the bytes
 * remaining in the request, and map its first page fragment.
 */
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	/* sg->offset may span pages; split into page index + in-page offset. */
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	/* Never consume more than the request asked for. */
	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}
72 
73 static int crypto_hash_walk_first(struct ahash_request *req,
74 				  struct crypto_hash_walk *walk)
75 {
76 	walk->total = req->nbytes;
77 
78 	if (!walk->total) {
79 		walk->entrylen = 0;
80 		return 0;
81 	}
82 
83 	walk->sg = req->src;
84 	walk->flags = req->base.flags;
85 
86 	return hash_walk_new_entry(walk);
87 }
88 
/*
 * Unmap the current fragment and advance the walk.  @err is the result
 * of processing the previous fragment; non-zero aborts the walk and is
 * returned as-is.  Otherwise returns the size of the next mapped
 * fragment, or 0 once the walk is complete.
 */
static int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	walk->data -= walk->offset;	/* rewind to the mapping base for kunmap */

	kunmap_local(walk->data);
	crypto_yield(walk->flags);	/* maybe reschedule if MAY_SLEEP is set */

	if (err)
		return err;

	if (walk->entrylen) {
		/* More bytes in this sg entry: continue on its next page. */
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
112 
113 static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
114 {
115 	return !(walk->entrylen | walk->total);
116 }
117 
118 /*
119  * For an ahash tfm that is using an shash algorithm (instead of an ahash
120  * algorithm), this returns the underlying shash tfm.
121  */
/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.  The ahash context
 * of such a tfm holds exactly one pointer: the shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}
126 
127 static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
128 						    struct crypto_ahash *tfm)
129 {
130 	struct shash_desc *desc = ahash_request_ctx(req);
131 
132 	desc->tfm = ahash_to_shash(tfm);
133 	return desc;
134 }
135 
/*
 * Hash an ahash request's scatterlist data with an shash algorithm.
 * Walks req->src page by page, feeding each mapped fragment to
 * crypto_shash_update().  Returns 0 on success or a negative errno.
 */
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	/*
	 * crypto_hash_walk_done() unmaps the previous fragment and maps
	 * the next; a negative result from crypto_shash_update() is
	 * passed through it and terminates the loop.
	 */
	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
148 
/*
 * Hash the remaining request data with an shash algorithm and write
 * the digest to req->result.  Equivalent to update(all data) followed
 * by final().  Returns 0 on success or a negative errno.
 */
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		/* No data at all: just emit the final digest. */
		return crypto_shash_final(desc, req->result);

	do {
		/*
		 * Use finup() on the last fragment so the digest comes
		 * out of the same call; plain update() otherwise.
		 */
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
169 
/*
 * One-shot digest of an ahash request using an shash algorithm.
 *
 * Fast path: when all the data fits inside a single page of the first
 * scatterlist entry, map it once and call crypto_shash_digest()
 * directly.  Otherwise fall back to init() plus the walking finup.
 */
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	/* Comma expressions load sg/offset only when nbytes != 0. */
	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
193 
/* Tfm destructor for shash-backed ahash tfms: drop the inner shash. */
static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **shash_ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*shash_ctx);
}
200 
/*
 * Set up an ahash tfm that fronts an shash algorithm: create the
 * underlying shash tfm, stash it in the ahash context, and size the
 * request context to hold an shash descriptor.
 */
static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	/* Extra module reference for the shash tfm we are about to create. */
	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	/* Mirror the underlying shash's NEED_KEY state on the wrapper. */
	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
227 
/* Default ->setkey stub for keyless algorithms; always fails. */
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}
233 
234 static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
235 {
236 	if (alg->setkey != ahash_nosetkey &&
237 	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
238 		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
239 }
240 
/*
 * Set the key on a keyed hash transform.  On success NEED_KEY is
 * cleared; on failure it is re-asserted (where the algorithm requires
 * a key) so the tfm cannot be used until a valid key is provided.
 */
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			/* Copy the shash layer's NEED_KEY state back up. */
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
269 
/* Initialise the hash state for a new message. */
int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	/*
	 * NOTE(review): the shash path skips the explicit NEED_KEY check
	 * below — presumably crypto_shash_init() enforces it itself;
	 * confirm against shash.c.
	 */
	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);
281 
282 static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
283 			  bool has_state)
284 {
285 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
286 	unsigned int ds = crypto_ahash_digestsize(tfm);
287 	struct ahash_request *subreq;
288 	unsigned int subreq_size;
289 	unsigned int reqsize;
290 	u8 *result;
291 	gfp_t gfp;
292 	u32 flags;
293 
294 	subreq_size = sizeof(*subreq);
295 	reqsize = crypto_ahash_reqsize(tfm);
296 	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
297 	subreq_size += reqsize;
298 	subreq_size += ds;
299 
300 	flags = ahash_request_flags(req);
301 	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?  GFP_KERNEL : GFP_ATOMIC;
302 	subreq = kmalloc(subreq_size, gfp);
303 	if (!subreq)
304 		return -ENOMEM;
305 
306 	ahash_request_set_tfm(subreq, tfm);
307 	ahash_request_set_callback(subreq, flags, cplt, req);
308 
309 	result = (u8 *)(subreq + 1) + reqsize;
310 
311 	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);
312 
313 	if (has_state) {
314 		void *state;
315 
316 		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
317 		if (!state) {
318 			kfree(subreq);
319 			return -ENOMEM;
320 		}
321 
322 		crypto_ahash_export(req, state);
323 		crypto_ahash_import(subreq, state);
324 		kfree_sensitive(state);
325 	}
326 
327 	req->priv = subreq;
328 
329 	return 0;
330 }
331 
/*
 * Tear down the subrequest created by ahash_save_req().  On success
 * the digest is first copied from the subrequest's private result
 * buffer into the caller-visible req->result.
 */
static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	/* The subrequest holds intermediate hash state: wipe it on free. */
	kfree_sensitive(subreq);
}
344 
345 int crypto_ahash_update(struct ahash_request *req)
346 {
347 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
348 
349 	if (likely(tfm->using_shash))
350 		return shash_ahash_update(req, ahash_request_ctx(req));
351 
352 	return crypto_ahash_alg(tfm)->update(req);
353 }
354 EXPORT_SYMBOL_GPL(crypto_ahash_update);
355 
356 int crypto_ahash_final(struct ahash_request *req)
357 {
358 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
359 
360 	if (likely(tfm->using_shash))
361 		return crypto_shash_final(ahash_request_ctx(req), req->result);
362 
363 	return crypto_ahash_alg(tfm)->final(req);
364 }
365 EXPORT_SYMBOL_GPL(crypto_ahash_final);
366 
367 int crypto_ahash_finup(struct ahash_request *req)
368 {
369 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
370 
371 	if (likely(tfm->using_shash))
372 		return shash_ahash_finup(req, ahash_request_ctx(req));
373 
374 	return crypto_ahash_alg(tfm)->finup(req);
375 }
376 EXPORT_SYMBOL_GPL(crypto_ahash_finup);
377 
/* One-shot hash: init + update + final over the whole request. */
int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));

	/* Keyed algorithms must have been given a key first. */
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return crypto_ahash_alg(tfm)->digest(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
391 
/*
 * Completion callback for the final() step of the default finup:
 * copy out the digest, free the subrequest, complete the original
 * request.
 */
static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	/* -EINPROGRESS is only a queued notification; real result follows. */
	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}
403 
/*
 * Second half of the default finup: once update() has finished on the
 * subrequest, run final() on it.  Returns -EINPROGRESS/-EBUSY when
 * final() goes asynchronous (completion continues in
 * ahash_def_finup_done2); otherwise tears down the subrequest and
 * returns the result.
 */
static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	/* Re-point the callback at the stage-2 handler before final(). */
	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}
421 
/*
 * Completion callback for the update() step of the default finup.
 * Chains into the final() step and only completes the original
 * request when the remainder finishes (or fails) synchronously here.
 */
static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	/* Keep only MAY_BACKLOG for the final() stage. */
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}
440 
/*
 * Default ->finup for drivers that only provide update() and final():
 * clone the request state into a subrequest, run update() then final()
 * on it, and copy the digest back on completion.
 */
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = crypto_ahash_alg(tfm)->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	/* Synchronous completion: proceed straight to the final() stage. */
	return ahash_def_finup_finish1(req, err);
}
456 
457 int crypto_ahash_export(struct ahash_request *req, void *out)
458 {
459 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
460 
461 	if (likely(tfm->using_shash))
462 		return crypto_shash_export(ahash_request_ctx(req), out);
463 	return crypto_ahash_alg(tfm)->export(req, out);
464 }
465 EXPORT_SYMBOL_GPL(crypto_ahash_export);
466 
/* Restore hash state previously produced by crypto_ahash_export(). */
int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	/* Refuse to import state into a keyed tfm that has no key set. */
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);
478 
479 static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
480 {
481 	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
482 	struct ahash_alg *alg = crypto_ahash_alg(hash);
483 
484 	alg->exit_tfm(hash);
485 }
486 
/*
 * Per-tfm constructor for the ahash type.  Shash-backed algorithms are
 * routed to the wrapper setup; native ahash algorithms get the
 * NEED_KEY policy applied and their optional init_tfm() run.
 */
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	crypto_ahash_set_statesize(hash, alg->halg.statesize);

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	ahash_set_needkey(hash, alg);

	/* Install a destructor only if the algorithm needs one. */
	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}
504 
505 static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
506 {
507 	if (alg->cra_type == &crypto_shash_type)
508 		return sizeof(struct crypto_shash *);
509 
510 	return crypto_alg_extsize(alg);
511 }
512 
513 static void crypto_ahash_free_instance(struct crypto_instance *inst)
514 {
515 	struct ahash_instance *ahash = ahash_instance(inst);
516 
517 	ahash->free(ahash);
518 }
519 
/* Report ahash algorithm details to userspace via netlink (CRYPTO_USER). */
static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	/* Zero first so padding never leaks kernel stack to userspace. */
	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
534 
/* /proc/crypto output for ahash algorithms. */
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}
546 
/* Frontend type operations shared by all ahash tfms and instances. */
static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};
562 
563 int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
564 		      struct crypto_instance *inst,
565 		      const char *name, u32 type, u32 mask)
566 {
567 	spawn->base.frontend = &crypto_ahash_type;
568 	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
569 }
570 EXPORT_SYMBOL_GPL(crypto_grab_ahash);
571 
572 struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
573 					u32 mask)
574 {
575 	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
576 }
577 EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
578 
579 int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
580 {
581 	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
582 }
583 EXPORT_SYMBOL_GPL(crypto_has_ahash);
584 
/*
 * True if the algorithm accepts a key, i.e. its setkey is not the
 * default no-op stub (for either the ahash or the shash variant).
 */
static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}
594 
/*
 * Clone an ahash transform.  Keyless tfms carry no private key state,
 * so the original is shared by taking another reference.  Keyed tfms
 * need a real copy: either by cloning the underlying shash, or via the
 * algorithm's clone_tfm() hook.  Returns the clone or an ERR_PTR.
 */
struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		/* No key state: sharing a reference is safe and cheap. */
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	/* Native ahash: the algorithm must support cloning its key state. */
	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);
649 
/*
 * Common validation and fixup applied to every ahash algorithm before
 * registration: require a non-zero statesize, wire up the ahash type,
 * and fill in the default finup/setkey implementations.
 */
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	/* Optional hooks get working defaults. */
	if (!alg->finup)
		alg->finup = ahash_def_finup;
	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	return 0;
}
672 
673 int crypto_register_ahash(struct ahash_alg *alg)
674 {
675 	struct crypto_alg *base = &alg->halg.base;
676 	int err;
677 
678 	err = ahash_prepare_alg(alg);
679 	if (err)
680 		return err;
681 
682 	return crypto_register_alg(base);
683 }
684 EXPORT_SYMBOL_GPL(crypto_register_ahash);
685 
686 void crypto_unregister_ahash(struct ahash_alg *alg)
687 {
688 	crypto_unregister_alg(&alg->halg.base);
689 }
690 EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
691 
692 int crypto_register_ahashes(struct ahash_alg *algs, int count)
693 {
694 	int i, ret;
695 
696 	for (i = 0; i < count; i++) {
697 		ret = crypto_register_ahash(&algs[i]);
698 		if (ret)
699 			goto err;
700 	}
701 
702 	return 0;
703 
704 err:
705 	for (--i; i >= 0; --i)
706 		crypto_unregister_ahash(&algs[i]);
707 
708 	return ret;
709 }
710 EXPORT_SYMBOL_GPL(crypto_register_ahashes);
711 
712 void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
713 {
714 	int i;
715 
716 	for (i = count - 1; i >= 0; --i)
717 		crypto_unregister_ahash(&algs[i]);
718 }
719 EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);
720 
/*
 * Register a template-created ahash instance.  The instance must have
 * a ->free callback (used by crypto_ahash_free_instance()).
 */
int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);
736 
737 MODULE_LICENSE("GPL");
738 MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
739