// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API. It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms. The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */
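
/*
 * Illustrative usage sketch (example only, not part of this file; assumes a
 * "sha256" implementation is registered and elides error handling):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	u8 out[SHA256_DIGEST_SIZE];
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, out, len);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */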

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <linux/string_choices.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

struct crypto_hash_walk {
	const char *data;

	unsigned int offset;
	unsigned int flags;

	struct page *pg;
	unsigned int entrylen;

	unsigned int total;
	struct scatterlist *sg;
};

static int ahash_def_finup(struct ahash_request *req);

static inline bool crypto_ahash_block_only(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_BLOCK_ONLY;
}

static inline bool crypto_ahash_final_nonzero(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_FINAL_NONZERO;
}

static inline bool crypto_ahash_need_fallback(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_ALG_NEED_FALLBACK;
}

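/*
 * Shared completion callback for operations split into an asynchronous
 * step followed by a synchronous @finish step.  An -EINPROGRESS
 * notification is forwarded to the caller as-is; any other completion
 * runs @finish (in a context that must not sleep) and reports its
 * result, unless @finish itself went asynchronous again.
 */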
static inline void ahash_op_done(void *data, int err,
				 int (*finish)(struct ahash_request *, int))
{
	struct ahash_request *areq = data;
	crypto_completion_t compl;

	compl = areq->saved_complete;
	data = areq->saved_data;
	if (err == -EINPROGRESS)
		goto out;

	areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	err = finish(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	compl(data, err);
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = nth_page(sg_page(walk->sg), (walk->offset >> PAGE_SHIFT));
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

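/*
 * Start walking the data of an ahash request.  A virtually addressed
 * request is handed back as a single chunk; a scatterlist request is
 * mapped one page-bounded chunk at a time, with crypto_hash_walk_done()
 * advancing to the next chunk.
 */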
static int crypto_hash_walk_first(struct ahash_request *req,
				  struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;
	walk->entrylen = 0;

	if (!walk->total)
		return 0;

	walk->flags = req->base.flags;

	if (ahash_request_isvirt(req)) {
		walk->data = req->svirt;
		walk->total = 0;
		return req->nbytes;
	}

	walk->sg = req->src;

	return hash_walk_new_entry(walk);
}

static int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	if ((walk->flags & CRYPTO_AHASH_REQ_VIRT))
		return err;

	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}

static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
{
	return !(walk->entrylen | walk->total);
}

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	struct page *page;
	const u8 *data;
	int err;

	data = req->svirt;
	if (!nbytes || ahash_request_isvirt(req))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	sg = req->src;
	if (nbytes > sg->length)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	page = sg_page(sg);
	offset = sg->offset;
	data = lowmem_page_address(page) + offset;
	if (!IS_ENABLED(CONFIG_HIGHMEM))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	page = nth_page(page, offset >> PAGE_SHIFT);
	offset = offset_in_page(offset);

	if (nbytes > (unsigned int)PAGE_SIZE - offset)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	data = kmap_local_page(page);
	err = crypto_shash_digest(desc, data + offset, nbytes,
				  req->result);
	kunmap_local(data);
	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	return 0;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (!err && crypto_ahash_need_fallback(tfm))
			err = crypto_ahash_setkey(crypto_ahash_fb(tfm),
						  key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
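
/*
 * Keyed algorithms (e.g. "hmac(sha256)") must be keyed before hashing;
 * until then CRYPTO_TFM_NEED_KEY is set and operations fail with -ENOKEY.
 * Illustrative sketch only:
 *
 *	err = crypto_ahash_setkey(tfm, key, keylen);
 *	if (err)
 *		return err;	/\* NEED_KEY remains set *\/
 */
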
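/*
 * Call *@op directly if the transform can take the request as-is.
 * Otherwise bounce a virtually addressed request through the fallback
 * tfm, carrying the hash state across with export/import as needed.
 */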
static int ahash_do_req_chain(struct ahash_request *req,
			      int (*const *op)(struct ahash_request *req))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	if (crypto_ahash_req_virt(tfm) || !ahash_request_isvirt(req))
		return (*op)(req);

	if (crypto_ahash_statesize(tfm) > HASH_MAX_STATESIZE)
		return -ENOSYS;

	{
		u8 state[HASH_MAX_STATESIZE];

		if (op == &crypto_ahash_alg(tfm)->digest) {
			ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
			err = crypto_ahash_digest(req);
			goto out_no_state;
		}

		err = crypto_ahash_export(req, state);
		ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
		err = err ?: crypto_ahash_import(req, state);

		if (op == &crypto_ahash_alg(tfm)->finup) {
			err = err ?: crypto_ahash_finup(req);
			goto out_no_state;
		}

		err = err ?:
		      crypto_ahash_update(req) ?:
		      crypto_ahash_export(req, state);

		ahash_request_set_tfm(req, tfm);
		return err ?: crypto_ahash_import(req, state);

out_no_state:
		ahash_request_set_tfm(req, tfm);
		return err;
	}
}

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_block_only(tfm)) {
		u8 *buf = ahash_request_ctx(req);

		buf += crypto_ahash_reqsize(tfm) - 1;
		*buf = 0;
	}
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

static void ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
	req->saved_complete = req->base.complete;
	req->saved_data = req->base.data;
	req->base.complete = cplt;
	req->base.data = req;
}

static void ahash_restore_req(struct ahash_request *req)
{
	req->base.complete = req->saved_complete;
	req->base.data = req->saved_data;
}

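/*
 * Finish an update on a block-only algorithm: unhook the partial-block
 * buffer that was chained in front of req->src, then stash the
 * unprocessed tail (reported by the driver as a non-negative return
 * value) back into that buffer for the next operation.
 */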
static int ahash_update_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen;
	u8 *buf;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		req->src = req->sg_head + 1;
		if (sg_is_chain(req->src))
			req->src = sg_chain_ptr(req->src);
	}

	req->nbytes += nonzero - blen;

	blen = err < 0 ? 0 : err + nonzero;
	if (ahash_request_isvirt(req))
		memcpy(buf, req->svirt + req->nbytes - blen, blen);
	else
		memcpy_from_sglist(buf, req->src, req->nbytes - blen, blen);
	*blenp = blen;

	ahash_restore_req(req);

	return err;
}

static void ahash_update_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_update_finish);
}

int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen + req->nbytes < bs + nonzero) {
		if (ahash_request_isvirt(req))
			memcpy(buf + blen, req->svirt, req->nbytes);
		else
			memcpy_from_sglist(buf + blen, req->src, 0,
					   req->nbytes);

		*blenp += req->nbytes;
		return 0;
	}

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}
	req->nbytes -= nonzero;

	ahash_save_req(req, ahash_update_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_update_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);
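
/*
 * Illustrative sketch of incremental hashing (example only; synchronous
 * completion assumed, error handling elided, result needed only at finup):
 *
 *	crypto_ahash_init(req);
 *	ahash_request_set_crypt(req, &sg1, NULL, len1);
 *	crypto_ahash_update(req);
 *	ahash_request_set_crypt(req, &sg2, out, len2);
 *	crypto_ahash_finup(req);
 */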

static int ahash_finup_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	u8 *blenp = ahash_request_ctx(req);
	int blen;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;

	if (blen) {
		if (sg_is_last(req->src))
			req->src = NULL;
		else {
			req->src = req->sg_head + 1;
			if (sg_is_chain(req->src))
				req->src = sg_chain_ptr(req->src);
		}
		req->nbytes -= blen;
	}

	ahash_restore_req(req);

	return err;
}

static void ahash_finup_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_finup_finish);
}

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_alg(tfm)->finup)
		return ahash_def_finup(req);
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (!req->src)
			sg_mark_end(req->sg_head);
		else if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}

	ahash_save_req(req, ahash_finup_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_finup_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq);
	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(tfm)->final(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	ahash_op_done(data, err, ahash_def_finup_finish1);
}

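/*
 * Default finup for algorithms that only implement ->update and ->final:
 * run the two steps back to back, resuming through the saved completion
 * callback whenever a step goes asynchronous.
 */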
static int ahash_def_finup(struct ahash_request *req)
{
	int err;

	ahash_save_req(req, ahash_def_finup_done1);

	err = crypto_ahash_update(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export_core(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export_core(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export_core(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export_core);

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	if (crypto_ahash_block_only(tfm)) {
		unsigned int plen = crypto_ahash_blocksize(tfm) + 1;
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		unsigned int ss = crypto_ahash_statesize(tfm);
		u8 *buf = ahash_request_ctx(req);

		memcpy(out + ss - plen, buf + reqsize - plen, plen);
	}
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import_core(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import_core(prepare_shash_desc(req, tfm),
						in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import_core(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import_core);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (crypto_ahash_block_only(tfm)) {
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		u8 *buf = ahash_request_ctx(req);

		buf[reqsize - 1] = 0;
	}
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);
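
/*
 * Illustrative sketch of suspending and resuming a partial hash via
 * export/import (example only; in general the buffer must hold
 * crypto_ahash_statesize(tfm) bytes, assumed here to fit):
 *
 *	u8 state[HASH_MAX_STATESIZE];
 *
 *	err = crypto_ahash_export(req, state);
 *	...
 *	err = crypto_ahash_import(req, state);
 */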

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);

	if (crypto_ahash_need_fallback(hash))
		crypto_free_ahash(crypto_ahash_fb(hash));
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);
	struct crypto_ahash *fb = NULL;
	int err;

	crypto_ahash_set_statesize(hash, alg->halg.statesize);
	crypto_ahash_set_reqsize(hash, crypto_tfm_alg_reqsize(tfm));

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_alloc_ahash(crypto_ahash_alg_name(hash),
					CRYPTO_ALG_REQ_VIRT,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_REQ_VIRT |
					CRYPTO_AHASH_ALG_NO_EXPORT_CORE);
		if (IS_ERR(fb))
			return PTR_ERR(fb);

		tfm->fb = crypto_ahash_tfm(fb);
	}

	ahash_set_needkey(hash, alg);

	tfm->exit = crypto_ahash_exit_tfm;

	if (alg->init_tfm)
		err = alg->init_tfm(hash);
	else if (tfm->__crt_alg->cra_init)
		err = tfm->__crt_alg->cra_init(tfm);
	else
		return 0;

	if (err)
		goto out_free_sync_hash;

	if (!ahash_is_async(hash) && crypto_ahash_reqsize(hash) >
				     MAX_SYNC_HASH_REQSIZE)
		goto out_exit_tfm;

	BUILD_BUG_ON(HASH_MAX_DESCSIZE > MAX_SYNC_HASH_REQSIZE);
	if (crypto_ahash_reqsize(hash) < HASH_MAX_DESCSIZE)
		crypto_ahash_set_reqsize(hash, HASH_MAX_DESCSIZE);

	return 0;

out_exit_tfm:
	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);
	err = -EINVAL;
out_free_sync_hash:
	crypto_free_ahash(fb);
	return err;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
	.algsize = offsetof(struct ahash_alg, halg.base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *fb = NULL;
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		crypto_ahash_tfm(nhash)->exit = crypto_exit_ahash_using_shash;
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_clone_ahash(crypto_ahash_fb(hash));
		err = PTR_ERR(fb);
		if (IS_ERR(fb))
			goto out_free_nhash;

		crypto_ahash_tfm(nhash)->fb = crypto_ahash_tfm(fb);
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_fb;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_fb;

	crypto_ahash_tfm(nhash)->exit = crypto_ahash_exit_tfm;

	return nhash;

out_free_fb:
	crypto_free_ahash(fb);
out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_default_export_core(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}

static int ahash_default_import_core(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	if (base->cra_reqsize && base->cra_reqsize < alg->halg.statesize)
		return -EINVAL;

	if (!(base->cra_flags & CRYPTO_ALG_ASYNC) &&
	    base->cra_reqsize > MAX_SYNC_HASH_REQSIZE)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if ((base->cra_flags ^ CRYPTO_ALG_REQ_VIRT) &
	    (CRYPTO_ALG_ASYNC | CRYPTO_ALG_REQ_VIRT))
		base->cra_flags |= CRYPTO_ALG_NEED_FALLBACK;

	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) {
		BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256);
		if (!alg->finup)
			return -EINVAL;

		base->cra_reqsize += base->cra_blocksize + 1;
		alg->halg.statesize += base->cra_blocksize + 1;
		alg->export_core = alg->export;
		alg->import_core = alg->import;
	} else if (!alg->export_core || !alg->import_core) {
		alg->export_core = ahash_default_export_core;
		alg->import_core = ahash_default_import_core;
		base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE;
	}

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

void ahash_request_free(struct ahash_request *req)
{
	if (unlikely(!req))
		return;

	if (!ahash_req_on_stack(req)) {
		kfree(req);
		return;
	}

	ahash_request_zero(req);
}
EXPORT_SYMBOL_GPL(ahash_request_free);

int crypto_hash_digest(struct crypto_ahash *tfm, const u8 *data,
		       unsigned int len, u8 *out)
{
	HASH_REQUEST_ON_STACK(req, crypto_ahash_fb(tfm));
	int err;

	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_virt(req, data, out, len);
	err = crypto_ahash_digest(req);

	ahash_request_zero(req);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_hash_digest);

void ahash_free_singlespawn_instance(struct ahash_instance *inst)
{
	crypto_drop_spawn(ahash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(ahash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");