xref: /linux/crypto/ahash.c (revision f6192d0d641f42f3f5b8efeb6e7f5f8bdbedf7bf)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Asynchronous Cryptographic Hash operations.
4  *
5  * This is the implementation of the ahash (asynchronous hash) API.  It differs
6  * from shash (synchronous hash) in that ahash supports asynchronous operations,
7  * and it hashes data from scatterlists instead of virtually addressed buffers.
8  *
9  * The ahash API provides access to both ahash and shash algorithms.  The shash
10  * API only provides access to shash algorithms.
11  *
12  * Copyright (c) 2008 Loc Ho <lho@amcc.com>
13  */
14 
15 #include <crypto/scatterwalk.h>
16 #include <linux/cryptouser.h>
17 #include <linux/err.h>
18 #include <linux/kernel.h>
19 #include <linux/mm.h>
20 #include <linux/module.h>
21 #include <linux/scatterlist.h>
22 #include <linux/slab.h>
23 #include <linux/seq_file.h>
24 #include <linux/string.h>
25 #include <linux/string_choices.h>
26 #include <net/netlink.h>
27 
28 #include "hash.h"
29 
30 #define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
31 
/*
 * State for walking the data of an ahash request one mapped page
 * fragment at a time.
 */
struct crypto_hash_walk {
	const char *data;	/* pointer to the currently mapped chunk */

	unsigned int offset;	/* offset of the chunk within the page */
	unsigned int flags;	/* request flags (CRYPTO_AHASH_REQ_VIRT etc.) */

	struct page *pg;	/* page currently being processed */
	unsigned int entrylen;	/* bytes left in the current sg entry */

	unsigned int total;	/* bytes left in the remaining sg entries */
	struct scatterlist *sg;	/* current scatterlist entry */
};
44 
45 static int ahash_def_finup(struct ahash_request *req);
46 
47 static inline bool crypto_ahash_block_only(struct crypto_ahash *tfm)
48 {
49 	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
50 	       CRYPTO_AHASH_ALG_BLOCK_ONLY;
51 }
52 
53 static inline bool crypto_ahash_final_nonzero(struct crypto_ahash *tfm)
54 {
55 	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
56 	       CRYPTO_AHASH_ALG_FINAL_NONZERO;
57 }
58 
59 static inline bool crypto_ahash_need_fallback(struct crypto_ahash *tfm)
60 {
61 	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
62 	       CRYPTO_ALG_NEED_FALLBACK;
63 }
64 
/*
 * Common async completion handler: restore the caller's completion
 * callback/data (saved by ahash_save_req()) and run @finish to
 * post-process the result before completing the request.
 */
static inline void ahash_op_done(void *data, int err,
				 int (*finish)(struct ahash_request *, int))
{
	struct ahash_request *areq = data;
	crypto_completion_t compl;

	compl = areq->saved_complete;
	data = areq->saved_data;
	/* Progress notification rather than final result: forward as-is. */
	if (err == -EINPROGRESS)
		goto out;

	/* We may be called from atomic context; sleeping is off limits. */
	areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	err = finish(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	compl(data, err);
}
85 
86 static int hash_walk_next(struct crypto_hash_walk *walk)
87 {
88 	unsigned int offset = walk->offset;
89 	unsigned int nbytes = min(walk->entrylen,
90 				  ((unsigned int)(PAGE_SIZE)) - offset);
91 
92 	walk->data = kmap_local_page(walk->pg);
93 	walk->data += offset;
94 	walk->entrylen -= nbytes;
95 	return nbytes;
96 }
97 
/*
 * Start walking a fresh scatterlist entry: find its page and in-page
 * offset, clamp the entry length to the remaining total, and map the
 * first chunk.
 */
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	/* sg->offset may exceed PAGE_SIZE; advance to the correct page. */
	walk->pg = nth_page(sg_page(walk->sg), (walk->offset >> PAGE_SHIFT));
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}
114 
/*
 * Begin walking the data of @req.  Returns the number of bytes mapped
 * at walk->data, 0 for an empty request, or a negative errno.
 * Virtual-address requests are handed back in one piece, unmapped.
 */
static int crypto_hash_walk_first(struct ahash_request *req,
				  struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;
	walk->entrylen = 0;

	if (!walk->total)
		return 0;

	walk->flags = req->base.flags;

	if (ahash_request_isvirt(req)) {
		walk->data = req->svirt;
		walk->total = 0;
		return req->nbytes;
	}

	walk->sg = req->src;

	return hash_walk_new_entry(walk);
}
136 
/*
 * Finish processing the current chunk.  @err is the caller's result for
 * that chunk; on success the next chunk is mapped and its length
 * returned, and 0 is returned once the walk is complete.
 */
static int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	/* Virtual-address walks consist of a single unmapped chunk. */
	if ((walk->flags & CRYPTO_AHASH_REQ_VIRT))
		return err;

	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		/* Current sg entry continues on the next page. */
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
163 
164 static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
165 {
166 	return !(walk->entrylen | walk->total);
167 }
168 
169 /*
170  * For an ahash tfm that is using an shash algorithm (instead of an ahash
171  * algorithm), this returns the underlying shash tfm.
172  */
173 static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
174 {
175 	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
176 }
177 
178 static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
179 						    struct crypto_ahash *tfm)
180 {
181 	struct shash_desc *desc = ahash_request_ctx(req);
182 
183 	desc->tfm = ahash_to_shash(tfm);
184 	return desc;
185 }
186 
187 int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
188 {
189 	struct crypto_hash_walk walk;
190 	int nbytes;
191 
192 	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
193 	     nbytes = crypto_hash_walk_done(&walk, nbytes))
194 		nbytes = crypto_shash_update(desc, walk.data, nbytes);
195 
196 	return nbytes;
197 }
198 EXPORT_SYMBOL_GPL(shash_ahash_update);
199 
/*
 * Implement ahash finup on top of an shash algorithm: walk the request
 * data, feeding intermediate chunks to crypto_shash_update() and the
 * last chunk to crypto_shash_finup().
 */
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	/* No data at all: just finalise the running state. */
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
220 
/*
 * Implement ahash digest on top of an shash algorithm.  Data that is
 * virtually addressed, or that fits inside a single mapping, is hashed
 * with one crypto_shash_digest() call; everything else falls back to
 * init + finup over the scatterlist walk.
 */
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	struct page *page;
	const u8 *data;
	int err;

	data = req->svirt;
	if (!nbytes || ahash_request_isvirt(req))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	sg = req->src;
	/* Data spans multiple sg entries: do a full walk instead. */
	if (nbytes > sg->length)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	page = sg_page(sg);
	offset = sg->offset;
	data = lowmem_page_address(page) + offset;
	if (!IS_ENABLED(CONFIG_HIGHMEM))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	/* With HIGHMEM the data must be mapped and may not cross a page. */
	page = nth_page(page, offset >> PAGE_SHIFT);
	offset = offset_in_page(offset);

	if (nbytes > (unsigned int)PAGE_SIZE - offset)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	data = kmap_local_page(page);
	err = crypto_shash_digest(desc, data + offset, nbytes,
				  req->result);
	kunmap_local(data);
	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
259 
/* Drop the wrapped shash tfm when an shash-backed ahash is destroyed. */
static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **shash = crypto_tfm_ctx(tfm);

	crypto_free_shash(*shash);
}
266 
/*
 * Initialise an ahash tfm that wraps an shash algorithm: take a module
 * reference, allocate the underlying shash tfm and stash it in the
 * ahash context.
 */
static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		/* Drop the reference taken above on failure. */
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	/* Propagate NEED_KEY so unkeyed use is rejected until setkey. */
	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	return 0;
}
292 
293 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
294 			  unsigned int keylen)
295 {
296 	return -ENOSYS;
297 }
298 
299 static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
300 {
301 	if (alg->setkey != ahash_nosetkey &&
302 	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
303 		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
304 }
305 
/*
 * Set the key for @tfm, keeping the NEED_KEY flag and any fallback tfm
 * in sync.  On failure the tfm is marked as needing a (new) key.
 */
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			/* Mirror the shash's NEED_KEY state on failure. */
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		/* The fallback must be keyed identically to stay usable. */
		if (!err && crypto_ahash_need_fallback(tfm))
			err = crypto_ahash_setkey(crypto_ahash_fb(tfm),
						  key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
337 
/*
 * Invoke *@op on @req, diverting to the fallback tfm when the request
 * uses virtual addresses that the driver cannot handle.  For stateful
 * ops the running hash state is carried across via export/import.
 */
static int ahash_do_req_chain(struct ahash_request *req,
			      int (*const *op)(struct ahash_request *req))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	/* Driver handles virtual addresses, or request is sg-based. */
	if (crypto_ahash_req_virt(tfm) || !ahash_request_isvirt(req))
		return (*op)(req);

	if (crypto_ahash_statesize(tfm) > HASH_MAX_STATESIZE)
		return -ENOSYS;

	if (!crypto_ahash_need_fallback(tfm))
		return -ENOSYS;

	{
		u8 state[HASH_MAX_STATESIZE];

		/* digest is stateless: just run it on the fallback. */
		if (op == &crypto_ahash_alg(tfm)->digest) {
			ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
			err = crypto_ahash_digest(req);
			goto out_no_state;
		}

		/* Transfer the running state into the fallback tfm. */
		err = crypto_ahash_export(req, state);
		ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
		err = err ?: crypto_ahash_import(req, state);

		if (op == &crypto_ahash_alg(tfm)->finup) {
			err = err ?: crypto_ahash_finup(req);
			goto out_no_state;
		}

		/* update: bring the state back into the original tfm. */
		err = err ?:
		      crypto_ahash_update(req) ?:
		      crypto_ahash_export(req, state);

		ahash_request_set_tfm(req, tfm);
		return err ?: crypto_ahash_import(req, state);

out_no_state:
		ahash_request_set_tfm(req, tfm);
		return err;
	}
}
383 
/* Initialise the hash state of @req for a fresh computation. */
int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	/* On-stack requests must not outlive the caller: no async. */
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_block_only(tfm)) {
		u8 *buf = ahash_request_ctx(req);

		/* Last ctx byte holds the buffered partial-block length. */
		buf += crypto_ahash_reqsize(tfm) - 1;
		*buf = 0;
	}
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);
403 
404 static void ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
405 {
406 	req->saved_complete = req->base.complete;
407 	req->saved_data = req->base.data;
408 	req->base.complete = cplt;
409 	req->base.data = req;
410 }
411 
412 static void ahash_restore_req(struct ahash_request *req)
413 {
414 	req->base.complete = req->saved_complete;
415 	req->base.data = req->saved_data;
416 }
417 
/*
 * Post-process a block-only update: restore the request's original
 * scatterlist, then stash the unconsumed trailing bytes (count comes
 * back from the driver in @err when non-negative) in the context
 * buffer so they can be prepended to the next update/finup.
 */
static int ahash_update_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen;
	u8 *buf;

	/* Partial-block length lives in the last byte of the req ctx. */
	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		/* Undo the sg_head prepend done by crypto_ahash_update(). */
		req->src = req->sg_head + 1;
		if (sg_is_chain(req->src))
			req->src = sg_chain_ptr(req->src);
	}

	req->nbytes += nonzero - blen;

	blen = err < 0 ? 0 : err + nonzero;
	if (ahash_request_isvirt(req))
		memcpy(buf, req->svirt + req->nbytes - blen, blen);
	else
		memcpy_from_sglist(buf, req->src, req->nbytes - blen, blen);
	*blenp = blen;

	ahash_restore_req(req);

	return err;
}
450 
451 static void ahash_update_done(void *data, int err)
452 {
453 	ahash_op_done(data, err, ahash_update_finish);
454 }
455 
/*
 * Hash more data into the request's state.  For block-only algorithms
 * a trailing partial block is buffered in the request context and
 * prepended (via req->sg_head) to the next chunk of data.
 */
int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));
	/* On-stack requests must not outlive the caller: no async. */
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);

	/* Partial-block length lives in the last byte of the req ctx. */
	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	/* Still short of a full block: just buffer the data and return. */
	if (blen + req->nbytes < bs + nonzero) {
		if (ahash_request_isvirt(req))
			memcpy(buf + blen, req->svirt, req->nbytes);
		else
			memcpy_from_sglist(buf + blen, req->src, 0,
					   req->nbytes);

		*blenp += req->nbytes;
		return 0;
	}

	if (blen) {
		/* Prepend the buffered partial block via req->sg_head. */
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}
	req->nbytes -= nonzero;

	ahash_save_req(req, ahash_update_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_update_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);
506 
/*
 * Post-process a block-only finup: restore the request's original
 * scatterlist and byte count after the buffered partial block was
 * prepended by crypto_ahash_finup().
 */
static int ahash_finup_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	u8 *blenp = ahash_request_ctx(req);
	int blen;

	/* Partial-block length lives in the last byte of the req ctx. */
	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;

	if (blen) {
		if (sg_is_last(req->src))
			req->src = NULL;
		else {
			req->src = req->sg_head + 1;
			if (sg_is_chain(req->src))
				req->src = sg_chain_ptr(req->src);
		}
		req->nbytes -= blen;
	}

	ahash_restore_req(req);

	return err;
}
531 
532 static void ahash_finup_done(void *data, int err)
533 {
534 	ahash_op_done(data, err, ahash_finup_finish);
535 }
536 
/*
 * Hash the final chunk of data and write the digest to req->result.
 * For block-only algorithms the buffered partial block is prepended
 * first; drivers without ->finup get the default update-then-final
 * implementation.
 */
int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));
	/* On-stack requests must not outlive the caller: no async. */
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_alg(tfm)->finup)
		return ahash_def_finup(req);
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);

	/* Partial-block length lives in the last byte of the req ctx. */
	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		/* Prepend the buffered partial block via req->sg_head. */
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (!req->src)
			sg_mark_end(req->sg_head);
		else if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}

	ahash_save_req(req, ahash_finup_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_finup_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);
578 
/* One-shot hash of the request's data into req->result. */
int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));
	/* On-stack requests must not outlive the caller: no async. */
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
592 
593 static void ahash_def_finup_done2(void *data, int err)
594 {
595 	struct ahash_request *areq = data;
596 
597 	if (err == -EINPROGRESS)
598 		return;
599 
600 	ahash_restore_req(areq);
601 	ahash_request_complete(areq, err);
602 }
603 
/*
 * Second half of the default finup: once update() is done, run
 * final(), re-pointing the completion at ahash_def_finup_done2 for
 * the async case.
 */
static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_final(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req);
	return err;
}
619 
620 static void ahash_def_finup_done1(void *data, int err)
621 {
622 	ahash_op_done(data, err, ahash_def_finup_finish1);
623 }
624 
/*
 * Default finup for drivers lacking a ->finup hook: update() followed
 * by final(), chained through ahash_def_finup_done1/2 for async tfms.
 */
static int ahash_def_finup(struct ahash_request *req)
{
	int err;

	ahash_save_req(req, ahash_def_finup_done1);

	err = crypto_ahash_update(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}
637 
/* Export the raw hash state, excluding any buffered partial block. */
int crypto_ahash_export_core(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export_core(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export_core(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export_core);
647 
/*
 * Export the full hash state.  For block-only algorithms the buffered
 * partial block plus its length byte are copied into the tail of the
 * exported state.
 */
int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	if (crypto_ahash_block_only(tfm)) {
		unsigned int plen = crypto_ahash_blocksize(tfm) + 1;
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		unsigned int ss = crypto_ahash_statesize(tfm);
		u8 *buf = ahash_request_ctx(req);

		/* Partial block + length byte -> tail of the state blob. */
		memcpy(out + ss - plen, buf + reqsize - plen, plen);
	}
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);
665 
/* Import a state previously produced by crypto_ahash_export_core(). */
int crypto_ahash_import_core(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import_core(prepare_shash_desc(req, tfm),
						in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import_core(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import_core);
678 
/* Import a state previously produced by crypto_ahash_export(). */
int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (crypto_ahash_block_only(tfm)) {
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		u8 *buf = ahash_request_ctx(req);

		/* Clear the partial-block length byte in the req ctx. */
		buf[reqsize - 1] = 0;
	}
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);
696 
/* Teardown: run the algorithm's exit hook and free any fallback tfm. */
static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	/* Prefer the typed exit_tfm hook over the legacy cra_exit. */
	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);

	if (crypto_ahash_need_fallback(hash))
		crypto_free_ahash(crypto_ahash_fb(hash));
}
710 
/*
 * Set up a freshly allocated ahash tfm: size bookkeeping, optional
 * fallback tfm allocation, and the algorithm's own init hook, with
 * full unwinding on failure.
 */
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);
	struct crypto_ahash *fb = NULL;
	int err;

	crypto_ahash_set_statesize(hash, alg->halg.statesize);
	crypto_ahash_set_reqsize(hash, crypto_tfm_alg_reqsize(tfm));

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	if (crypto_ahash_need_fallback(hash)) {
		/* Sync, virtual-address-capable instance of the same algo. */
		fb = crypto_alloc_ahash(crypto_ahash_alg_name(hash),
					CRYPTO_ALG_REQ_VIRT,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_REQ_VIRT |
					CRYPTO_AHASH_ALG_NO_EXPORT_CORE);
		if (IS_ERR(fb))
			return PTR_ERR(fb);

		tfm->fb = crypto_ahash_tfm(fb);
	}

	ahash_set_needkey(hash, alg);

	tfm->exit = crypto_ahash_exit_tfm;

	if (alg->init_tfm)
		err = alg->init_tfm(hash);
	else if (tfm->__crt_alg->cra_init)
		err = tfm->__crt_alg->cra_init(tfm);
	else
		return 0;

	if (err)
		goto out_free_sync_hash;

	/* A sync tfm's request must fit on the caller's stack. */
	if (!ahash_is_async(hash) && crypto_ahash_reqsize(hash) >
				     MAX_SYNC_HASH_REQSIZE)
		goto out_exit_tfm;

	BUILD_BUG_ON(HASH_MAX_DESCSIZE > MAX_SYNC_HASH_REQSIZE);
	if (crypto_ahash_reqsize(hash) < HASH_MAX_DESCSIZE)
		crypto_ahash_set_reqsize(hash, HASH_MAX_DESCSIZE);

	return 0;

out_exit_tfm:
	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);
	err = -EINVAL;
out_free_sync_hash:
	/* crypto_free_ahash(NULL) is a no-op when no fallback was made. */
	crypto_free_ahash(fb);
	return err;
}
770 
771 static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
772 {
773 	if (alg->cra_type == &crypto_shash_type)
774 		return sizeof(struct crypto_shash *);
775 
776 	return crypto_alg_extsize(alg);
777 }
778 
779 static void crypto_ahash_free_instance(struct crypto_instance *inst)
780 {
781 	struct ahash_instance *ahash = ahash_instance(inst);
782 
783 	ahash->free(ahash);
784 }
785 
786 static int __maybe_unused crypto_ahash_report(
787 	struct sk_buff *skb, struct crypto_alg *alg)
788 {
789 	struct crypto_report_hash rhash;
790 
791 	memset(&rhash, 0, sizeof(rhash));
792 
793 	strscpy(rhash.type, "ahash", sizeof(rhash.type));
794 
795 	rhash.blocksize = alg->cra_blocksize;
796 	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;
797 
798 	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
799 }
800 
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
/* /proc/crypto output for ahash algorithms. */
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}
812 
/*
 * crypto_type glue that plugs ahash algorithms into the generic crypto
 * API: context sizing, tfm initialisation, instance freeing and
 * procfs/netlink reporting.
 */
static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
	.algsize = offsetof(struct ahash_alg, halg.base),
};
829 
/* Initialise @spawn and bind it to ahash algorithm @name for @inst. */
int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);
838 
/* Allocate an ahash tfm for algorithm @alg_name. */
struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
845 
/* Return true if an ahash algorithm matching @alg_name is available. */
int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);
851 
/* Return true if @halg implements a real (non-stub) setkey. */
bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	/* shash-backed algorithms delegate to the shash helper. */
	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);
862 
/*
 * Clone @hash, duplicating its keyed state.  Unkeyed tfms are shared
 * by reference counting; keyed tfms are duplicated via the shash clone
 * helper or the algorithm's ->clone_tfm hook, including any fallback.
 */
struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *fb = NULL;
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	/* No key state to copy: just take another reference. */
	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		crypto_ahash_tfm(nhash)->exit = crypto_exit_ahash_using_shash;
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	/* The fallback tfm holds key state too, so clone it as well. */
	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_clone_ahash(crypto_ahash_fb(hash));
		err = PTR_ERR(fb);
		if (IS_ERR(fb))
			goto out_free_nhash;

		crypto_ahash_tfm(nhash)->fb = crypto_ahash_tfm(fb);
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_fb;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_fb;

	crypto_ahash_tfm(nhash)->exit = crypto_ahash_exit_tfm;

	return nhash;

out_free_fb:
	crypto_free_ahash(fb);
out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);
932 
/* Placeholder for algorithms without a core export; always fails. */
static int ahash_default_export_core(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}
937 
/* Placeholder for algorithms without a core import; always fails. */
static int ahash_default_import_core(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}
942 
/*
 * Validate and finalise an ahash algorithm before registration:
 * sanity-check sizes and flags, set the crypto type, arrange for a
 * fallback where required, and size the block-only buffering area.
 */
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	if (base->cra_reqsize && base->cra_reqsize < alg->halg.statesize)
		return -EINVAL;

	/* Sync algorithms must fit their request on the caller's stack. */
	if (!(base->cra_flags & CRYPTO_ALG_ASYNC) &&
	    base->cra_reqsize > MAX_SYNC_HASH_REQSIZE)
		return -EINVAL;

	/* NEED_FALLBACK and NO_FALLBACK are mutually exclusive. */
	if (base->cra_flags & CRYPTO_ALG_NEED_FALLBACK &&
	    base->cra_flags & CRYPTO_ALG_NO_FALLBACK)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	/* Async or non-REQ_VIRT algorithms need a fallback tfm. */
	if ((base->cra_flags ^ CRYPTO_ALG_REQ_VIRT) &
	    (CRYPTO_ALG_ASYNC | CRYPTO_ALG_REQ_VIRT) &&
	    !(base->cra_flags & CRYPTO_ALG_NO_FALLBACK))
		base->cra_flags |= CRYPTO_ALG_NEED_FALLBACK;

	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) {
		/* Partial-block length must fit in a single byte. */
		BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256);
		if (!alg->finup)
			return -EINVAL;

		/* Reserve room for one block plus the length byte. */
		base->cra_reqsize += base->cra_blocksize + 1;
		alg->halg.statesize += base->cra_blocksize + 1;
		alg->export_core = alg->export;
		alg->import_core = alg->import;
	} else if (!alg->export_core || !alg->import_core) {
		alg->export_core = ahash_default_export_core;
		alg->import_core = ahash_default_import_core;
		base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE;
	}

	return 0;
}
994 
995 int crypto_register_ahash(struct ahash_alg *alg)
996 {
997 	struct crypto_alg *base = &alg->halg.base;
998 	int err;
999 
1000 	err = ahash_prepare_alg(alg);
1001 	if (err)
1002 		return err;
1003 
1004 	return crypto_register_alg(base);
1005 }
1006 EXPORT_SYMBOL_GPL(crypto_register_ahash);
1007 
1008 void crypto_unregister_ahash(struct ahash_alg *alg)
1009 {
1010 	crypto_unregister_alg(&alg->halg.base);
1011 }
1012 EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
1013 
1014 int crypto_register_ahashes(struct ahash_alg *algs, int count)
1015 {
1016 	int i, ret;
1017 
1018 	for (i = 0; i < count; i++) {
1019 		ret = crypto_register_ahash(&algs[i]);
1020 		if (ret)
1021 			goto err;
1022 	}
1023 
1024 	return 0;
1025 
1026 err:
1027 	for (--i; i >= 0; --i)
1028 		crypto_unregister_ahash(&algs[i]);
1029 
1030 	return ret;
1031 }
1032 EXPORT_SYMBOL_GPL(crypto_register_ahashes);
1033 
1034 void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
1035 {
1036 	int i;
1037 
1038 	for (i = count - 1; i >= 0; --i)
1039 		crypto_unregister_ahash(&algs[i]);
1040 }
1041 EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);
1042 
1043 int ahash_register_instance(struct crypto_template *tmpl,
1044 			    struct ahash_instance *inst)
1045 {
1046 	int err;
1047 
1048 	if (WARN_ON(!inst->free))
1049 		return -EINVAL;
1050 
1051 	err = ahash_prepare_alg(&inst->alg);
1052 	if (err)
1053 		return err;
1054 
1055 	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
1056 }
1057 EXPORT_SYMBOL_GPL(ahash_register_instance);
1058 
/*
 * Release an ahash request: heap-allocated requests are freed,
 * on-stack requests are merely wiped.
 */
void ahash_request_free(struct ahash_request *req)
{
	if (unlikely(!req))
		return;

	if (ahash_req_on_stack(req))
		ahash_request_zero(req);
	else
		kfree(req);
}
EXPORT_SYMBOL_GPL(ahash_request_free);
1072 
/*
 * One-shot hash of a virtually addressed buffer, using an on-stack
 * request on the tfm's sync fallback.
 */
int crypto_hash_digest(struct crypto_ahash *tfm, const u8 *data,
		       unsigned int len, u8 *out)
{
	HASH_REQUEST_ON_STACK(req, crypto_ahash_fb(tfm));
	int err;

	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_virt(req, data, out, len);
	err = crypto_ahash_digest(req);

	/* Wipe intermediate hash state from the stack. */
	ahash_request_zero(req);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_hash_digest);
1088 
/* Free a single-spawn ahash instance: drop its spawn, then the memory. */
void ahash_free_singlespawn_instance(struct ahash_instance *inst)
{
	crypto_drop_spawn(ahash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(ahash_free_singlespawn_instance);
1095 
1096 MODULE_LICENSE("GPL");
1097 MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
1098