// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API.  It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms.  The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */
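
/*
 * A minimal usage sketch (illustrative only, not part of this file's
 * logic): hash a caller-provided scatterlist @sg of @nbytes bytes with an
 * ahash tfm, waiting synchronously via crypto_wait_req().  "sha256" is
 * just an example algorithm name and error handling is elided:
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, sg, digest, nbytes);
 *	crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */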

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <linux/string_choices.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

struct crypto_hash_walk {
	const char *data;

	unsigned int offset;
	unsigned int flags;

	struct page *pg;
	unsigned int entrylen;

	unsigned int total;
	struct scatterlist *sg;
};

struct ahash_save_req_state {
	struct list_head head;
	struct ahash_request *req0;
	struct ahash_request *cur;
	int (*op)(struct ahash_request *req);
	crypto_completion_t compl;
	void *data;
	struct scatterlist sg;
	const u8 *src;
	u8 *page;
	unsigned int offset;
	unsigned int nbytes;
};

static void ahash_reqchain_done(void *data, int err);
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt);
static void ahash_restore_req(struct ahash_save_req_state *state);
static void ahash_def_finup_done1(void *data, int err);
static int ahash_def_finup(struct ahash_request *req);

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = nth_page(sg_page(walk->sg), (walk->offset >> PAGE_SHIFT));
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

static int crypto_hash_walk_first(struct ahash_request *req,
				  struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;
	walk->entrylen = 0;

	if (!walk->total)
		return 0;

	walk->flags = req->base.flags;

	if (ahash_request_isvirt(req)) {
		walk->data = req->svirt;
		walk->total = 0;
		return req->nbytes;
	}

	walk->sg = req->src;

	return hash_walk_new_entry(walk);
}

static int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	if ((walk->flags & CRYPTO_AHASH_REQ_VIRT))
		return err;

	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}

static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
{
	return !(walk->entrylen | walk->total);
}
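
/*
 * The walk helpers above are consumed in a loop of the following shape
 * (shash_ahash_update() below is the canonical user): each chunk returned
 * by crypto_hash_walk_first()/crypto_hash_walk_done() is processed, and
 * the result of processing is fed back into crypto_hash_walk_done() so
 * that errors terminate the walk.  process() here is a placeholder for
 * whatever per-chunk operation the caller performs:
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, nbytes))
 *		nbytes = process(walk.data, nbytes);
 */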

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	struct page *page;
	const u8 *data;
	int err;

	data = req->svirt;
	if (!nbytes || ahash_request_isvirt(req))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	sg = req->src;
	if (nbytes > sg->length)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	page = sg_page(sg);
	offset = sg->offset;
	data = lowmem_page_address(page) + offset;
	if (!IS_ENABLED(CONFIG_HIGHMEM))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	page = nth_page(page, offset >> PAGE_SHIFT);
	offset = offset_in_page(offset);

	if (nbytes > (unsigned int)PAGE_SIZE - offset)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	data = kmap_local_page(page);
	err = crypto_shash_digest(desc, data + offset, nbytes,
				  req->result);
	kunmap_local(data);
	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

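/*
 * Set up an ahash tfm whose underlying algorithm is actually an shash.
 * The tfm context stores a single pointer to the shash tfm, and the
 * request size is set to hold a shash_desc plus its descriptor state, so
 * that the shash_ahash_*() helpers above can drive the shash directly.
 */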
static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

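/*
 * If setkey fails, CRYPTO_TFM_NEED_KEY is (re-)set for algorithms that
 * require a key, so that subsequent init/digest calls fail with -ENOKEY
 * until a valid key has been installed.
 */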
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

static bool ahash_request_hasvirt(struct ahash_request *req)
{
	struct ahash_request *r2;

	if (ahash_request_isvirt(req))
		return true;

	list_for_each_entry(r2, &req->base.list, base.list)
		if (ahash_request_isvirt(r2))
			return true;

	return false;
}

static int ahash_reqchain_virt(struct ahash_save_req_state *state,
			       int err, u32 mask)
{
	struct ahash_request *req = state->cur;

	for (;;) {
		unsigned len = state->nbytes;

		req->base.err = err;

		if (!state->offset)
			break;

		if (state->offset == len || err) {
			u8 *result = req->result;

			ahash_request_set_virt(req, state->src, result, len);
			state->offset = 0;
			break;
		}

		len -= state->offset;

		len = min(PAGE_SIZE, len);
		memcpy(state->page, state->src + state->offset, len);
		state->offset += len;
		req->nbytes = len;

		err = state->op(req);
		if (err == -EINPROGRESS) {
			if (!list_empty(&state->head) ||
			    state->offset < state->nbytes)
				err = -EBUSY;
			break;
		}

		if (err == -EBUSY)
			break;
	}

	return err;
}

static int ahash_reqchain_finish(struct ahash_save_req_state *state,
				 int err, u32 mask)
{
	struct ahash_request *req0 = state->req0;
	struct ahash_request *req = state->cur;
	struct crypto_ahash *tfm;
	struct ahash_request *n;
	bool update;

	err = ahash_reqchain_virt(state, err, mask);
	if (err == -EINPROGRESS || err == -EBUSY)
		goto out;

	if (req != req0)
		list_add_tail(&req->base.list, &req0->base.list);

	tfm = crypto_ahash_reqtfm(req);
	update = state->op == crypto_ahash_alg(tfm)->update;

	list_for_each_entry_safe(req, n, &state->head, base.list) {
		list_del_init(&req->base.list);

		req->base.flags &= mask;
		req->base.complete = ahash_reqchain_done;
		req->base.data = state;
		state->cur = req;

		if (update && ahash_request_isvirt(req) && req->nbytes) {
			unsigned len = req->nbytes;
			u8 *result = req->result;

			state->src = req->svirt;
			state->nbytes = len;

			len = min(PAGE_SIZE, len);

			memcpy(state->page, req->svirt, len);
			state->offset = len;

			ahash_request_set_crypt(req, &state->sg, result, len);
		}

		err = state->op(req);

		if (err == -EINPROGRESS) {
			if (!list_empty(&state->head) ||
			    state->offset < state->nbytes)
				err = -EBUSY;
			goto out;
		}

		if (err == -EBUSY)
			goto out;

		err = ahash_reqchain_virt(state, err, mask);
		if (err == -EINPROGRESS || err == -EBUSY)
			goto out;

		list_add_tail(&req->base.list, &req0->base.list);
	}

	ahash_restore_req(state);

out:
	return err;
}

static void ahash_reqchain_done(void *data, int err)
{
	struct ahash_save_req_state *state = data;
	crypto_completion_t compl = state->compl;

	data = state->data;

	if (err == -EINPROGRESS) {
		if (!list_empty(&state->head) || state->offset < state->nbytes)
			return;
		goto notify;
	}

	err = ahash_reqchain_finish(state, err, CRYPTO_TFM_REQ_MAY_BACKLOG);
	if (err == -EBUSY)
		return;

notify:
	compl(data, err);
}

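/*
 * Run @op across @req and any requests chained behind it.  If the tfm
 * handles chaining and virtual addresses natively (or neither feature is
 * needed), @op is invoked directly.  Otherwise the chain is unrolled
 * here: virtually addressed update data is bounce-buffered one page at a
 * time through state->page, which is presented to the driver as a
 * single-entry scatterlist.
 */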
static int ahash_do_req_chain(struct ahash_request *req,
			      int (*op)(struct ahash_request *req))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool update = op == crypto_ahash_alg(tfm)->update;
	struct ahash_save_req_state *state;
	struct ahash_save_req_state state0;
	struct ahash_request *r2;
	u8 *page = NULL;
	int err;

	if (crypto_ahash_req_chain(tfm) ||
	    (!ahash_request_chained(req) &&
	     (!update || !ahash_request_isvirt(req))))
		return op(req);

	if (update && ahash_request_hasvirt(req)) {
		gfp_t gfp;
		u32 flags;

		flags = ahash_request_flags(req);
		gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
		page = (void *)__get_free_page(gfp);
		err = -ENOMEM;
		if (!page)
			goto out_set_chain;
	}

	state = &state0;
	if (ahash_is_async(tfm)) {
		err = ahash_save_req(req, ahash_reqchain_done);
		if (err)
			goto out_free_page;

		state = req->base.data;
	}

	state->op = op;
	state->cur = req;
	state->page = page;
	state->offset = 0;
	state->nbytes = 0;
	INIT_LIST_HEAD(&state->head);
	list_splice_init(&req->base.list, &state->head);

	if (page)
		sg_init_one(&state->sg, page, PAGE_SIZE);

	if (update && ahash_request_isvirt(req) && req->nbytes) {
		unsigned len = req->nbytes;
		u8 *result = req->result;

		state->src = req->svirt;
		state->nbytes = len;

		len = min(PAGE_SIZE, len);

		memcpy(page, req->svirt, len);
		state->offset = len;

		ahash_request_set_crypt(req, &state->sg, result, len);
	}

	err = op(req);
	if (err == -EBUSY || err == -EINPROGRESS)
		return -EBUSY;

	return ahash_reqchain_finish(state, err, ~0);

out_free_page:
	if (page) {
		memset(page, 0, PAGE_SIZE);
		free_page((unsigned long)page);
	}

out_set_chain:
	req->base.err = err;
	list_for_each_entry(r2, &req->base.list, base.list)
		r2->base.err = err;

	return err;
}

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash)) {
		struct ahash_request *r2;
		int err;

		err = crypto_shash_init(prepare_shash_desc(req, tfm));
		req->base.err = err;

		list_for_each_entry(r2, &req->base.list, base.list) {
			struct shash_desc *desc;

			desc = prepare_shash_desc(r2, tfm);
			r2->base.err = crypto_shash_init(desc);
		}

		return err;
	}

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->init);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

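/*
 * For async tfms, divert the request's completion callback to @cplt and
 * stash the caller's callback and data in a freshly allocated
 * ahash_save_req_state, to be undone by ahash_restore_req().  Synchronous
 * tfms need no such state and return immediately.
 */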
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ahash_save_req_state *state;
	gfp_t gfp;
	u32 flags;

	if (!ahash_is_async(tfm))
		return 0;

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?  GFP_KERNEL : GFP_ATOMIC;
	state = kmalloc(sizeof(*state), gfp);
	if (!state)
		return -ENOMEM;

	state->compl = req->base.complete;
	state->data = req->base.data;
	req->base.complete = cplt;
	req->base.data = state;
	state->req0 = req;
	state->page = NULL;

	return 0;
}

static void ahash_restore_req(struct ahash_save_req_state *state)
{
	struct ahash_request *req = state->req0;
	struct crypto_ahash *tfm;

	free_page((unsigned long)state->page);

	tfm = crypto_ahash_reqtfm(req);
	if (!ahash_is_async(tfm))
		return;

	state = req->base.data;

	req->base.complete = state->compl;
	req->base.data = state->data;
	kfree(state);
}

int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash)) {
		struct ahash_request *r2;
		int err;

		err = shash_ahash_update(req, ahash_request_ctx(req));
		req->base.err = err;

		list_for_each_entry(r2, &req->base.list, base.list) {
			struct shash_desc *desc;

			desc = ahash_request_ctx(r2);
			r2->base.err = shash_ahash_update(r2, desc);
		}

		return err;
	}

	return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->update);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash)) {
		struct ahash_request *r2;
		int err;

		err = crypto_shash_final(ahash_request_ctx(req), req->result);
		req->base.err = err;

		list_for_each_entry(r2, &req->base.list, base.list) {
			struct shash_desc *desc;

			desc = ahash_request_ctx(r2);
			r2->base.err = crypto_shash_final(desc, r2->result);
		}

		return err;
	}

	return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->final);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash)) {
		struct ahash_request *r2;
		int err;

		err = shash_ahash_finup(req, ahash_request_ctx(req));
		req->base.err = err;

		list_for_each_entry(r2, &req->base.list, base.list) {
			struct shash_desc *desc;

			desc = ahash_request_ctx(r2);
			r2->base.err = shash_ahash_finup(r2, desc);
		}

		return err;
	}

	if (!crypto_ahash_alg(tfm)->finup ||
	    (!crypto_ahash_req_chain(tfm) && ahash_request_hasvirt(req)))
		return ahash_def_finup(req);

	return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->finup);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

static int ahash_def_digest_finish(struct ahash_save_req_state *state, int err)
{
	struct ahash_request *req = state->req0;
	struct crypto_ahash *tfm;

	if (err)
		goto out;

	tfm = crypto_ahash_reqtfm(req);
	if (ahash_is_async(tfm))
		req->base.complete = ahash_def_finup_done1;

	err = crypto_ahash_update(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(state);
	return err;
}

static void ahash_def_digest_done(void *data, int err)
{
	struct ahash_save_req_state *state0 = data;
	struct ahash_save_req_state state;
	struct ahash_request *areq;

	state = *state0;
	areq = state.req0;
	if (err == -EINPROGRESS)
		goto out;

	areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	err = ahash_def_digest_finish(state0, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	state.compl(state.data, err);
}

static int ahash_def_digest(struct ahash_request *req)
{
	struct ahash_save_req_state *state;
	int err;

	err = ahash_save_req(req, ahash_def_digest_done);
	if (err)
		return err;

	state = req->base.data;

	err = crypto_ahash_init(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_digest_finish(state, err);
}

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash)) {
		struct ahash_request *r2;
		int err;

		err = shash_ahash_digest(req, prepare_shash_desc(req, tfm));
		req->base.err = err;

		list_for_each_entry(r2, &req->base.list, base.list) {
			struct shash_desc *desc;

			desc = prepare_shash_desc(r2, tfm);
			r2->base.err = shash_ahash_digest(r2, desc);
		}

		return err;
	}

	if (!crypto_ahash_req_chain(tfm) && ahash_request_hasvirt(req))
		return ahash_def_digest(req);

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

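/*
 * ahash_def_finup() and its helpers below emulate finup() as update()
 * followed by final().  The two completion callbacks chain the
 * asynchronous case: ahash_def_finup_done1() runs after the update and
 * kicks off the final, while ahash_def_finup_done2() completes the
 * original request.
 */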
static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_save_req_state *state = data;
	struct ahash_request *areq = state->req0;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(state);
	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_save_req_state *state, int err)
{
	struct ahash_request *req = state->req0;
	struct crypto_ahash *tfm;

	if (err)
		goto out;

	tfm = crypto_ahash_reqtfm(req);
	if (ahash_is_async(tfm))
		req->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_final(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(state);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_save_req_state *state0 = data;
	struct ahash_save_req_state state;
	struct ahash_request *areq;

	state = *state0;
	areq = state.req0;
	if (err == -EINPROGRESS)
		goto out;

	areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	err = ahash_def_finup_finish1(state0, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	state.compl(state.data, err);
}

static int ahash_def_finup(struct ahash_request *req)
{
	struct ahash_save_req_state *state;
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1);
	if (err)
		return err;

	state = req->base.data;

	err = crypto_ahash_update(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(state, err);
}

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	crypto_ahash_set_statesize(hash, alg->halg.statesize);
	crypto_ahash_set_reqsize(hash, alg->reqsize);

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	ahash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}

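/*
 * Clone an ahash tfm.  Keyless tfms carry no private state, so taking a
 * reference on the existing tfm suffices; keyed tfms get a real copy,
 * either by cloning the underlying shash or via the algorithm's
 * clone_tfm() hook.
 */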
struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	if (alg->reqsize && alg->reqsize < alg->halg.statesize)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);
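
/*
 * A hypothetical registration sketch (the foo_* names, context structs
 * and priority are illustrative only): a driver fills in struct ahash_alg
 * and registers it, typically from its module init.  Note that
 * ahash_prepare_alg() above rejects a zero halg.statesize:
 *
 *	static struct ahash_alg foo_sha256 = {
 *		.init	= foo_init,
 *		.update	= foo_update,
 *		.final	= foo_final,
 *		.digest	= foo_digest,
 *		.export	= foo_export,
 *		.import	= foo_import,
 *		.halg	= {
 *			.digestsize = SHA256_DIGEST_SIZE,
 *			.statesize  = sizeof(struct foo_state),
 *			.base = {
 *				.cra_name	 = "sha256",
 *				.cra_driver_name = "sha256-foo",
 *				.cra_priority	 = 300,
 *				.cra_blocksize	 = SHA256_BLOCK_SIZE,
 *				.cra_ctxsize	 = sizeof(struct foo_ctx),
 *				.cra_module	 = THIS_MODULE,
 *			},
 *		},
 *	};
 *
 *	return crypto_register_ahash(&foo_sha256);
 */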

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

void ahash_request_free(struct ahash_request *req)
{
	struct ahash_request *tmp;
	struct ahash_request *r2;

	if (unlikely(!req))
		return;

	list_for_each_entry_safe(r2, tmp, &req->base.list, base.list)
		kfree_sensitive(r2);

	kfree_sensitive(req);
}
EXPORT_SYMBOL_GPL(ahash_request_free);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
1157