// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

static const struct crypto_type crypto_ahash_type;

struct ahash_request_priv {
	crypto_completion_t complete;
	void *data;
	u8 *result;
	u32 flags;
	void *ubuf[] CRYPTO_MINALIGN_ATTR;
};

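/*
 * The hash walk helpers below let an implementation iterate over the
 * request's source scatterlist one mapped chunk at a time.
 * hash_walk_next() maps the current page and returns how many bytes of
 * it belong to the current scatterlist entry.
 */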
static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
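
/*
 * A minimal sketch of how a driver typically consumes the walk API
 * ("ctx" and "process_chunk" are hypothetical driver-side names):
 *
 *	struct crypto_hash_walk walk;
 *	int nbytes;
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, 0))
 *		process_chunk(ctx, walk.data, nbytes);
 */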

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm)
{
	const struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (tfm->setkey != ahash_nosetkey &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	int err = tfm->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		ahash_set_needkey(tfm);
		return err;
	}

	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

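/*
 * ahash_save_req() swaps the caller's request for an internally
 * allocated subrequest whose result buffer the implementation owns;
 * ahash_restore_req() copies the digest back into the caller's result
 * buffer and frees the subrequest once the operation finishes.
 */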
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}

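/*
 * The entry points below update the CONFIG_CRYPTO_STATS counters (when
 * that option is enabled) before dispatching to the implementation.
 */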
int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&hash_get_stat(alg)->hash_cnt);

	return crypto_hash_errstat(alg, tfm->final(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = hash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}

	return crypto_hash_errstat(alg, tfm->finup(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = hash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else
		err = tfm->digest(req);

	return crypto_hash_errstat(alg, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
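
/*
 * A minimal sketch of a synchronous one-shot digest through this API
 * (error handling elided; "data", "len" and "digest" are hypothetical
 * caller-side names):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	int err;
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */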

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_reqtfm(req)->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}

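/*
 * Default ->finup() for implementations that only provide ->update()
 * and ->final(): run update on a saved subrequest, then chain final
 * from the completion callback so asynchronous implementations keep
 * working.
 */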
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = tfm->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

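/*
 * An algorithm whose cra_type is not crypto_ahash_type is a synchronous
 * shash being exposed through the ahash interface; its ops are wired up
 * by crypto_init_shash_ops_async() instead.
 */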
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	hash->setkey = ahash_nosetkey;

	crypto_ahash_set_statesize(hash, alg->halg.statesize);

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_init_shash_ops_async(tfm);

	hash->init = alg->init;
	hash->update = alg->update;
	hash->final = alg->final;
	hash->finup = alg->finup ?: ahash_def_finup;
	hash->digest = alg->digest;
	hash->export = alg->export;
	hash->import = alg->import;

	if (alg->setkey) {
		hash->setkey = alg->setkey;
		ahash_set_needkey(hash);
	}

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type != &crypto_ahash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static int __maybe_unused crypto_ahash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "ahash");
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_ahash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

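/*
 * Cloning an unkeyed ahash only needs a reference count bump since the
 * transform carries no per-key state; keyed hashes get a fresh tfm and
 * then either the shash clone path or the algorithm's ->clone_tfm().
 */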
struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->init = hash->init;
	nhash->update = hash->update;
	nhash->final = hash->final;
	nhash->finup = hash->finup;
	nhash->digest = hash->digest;
	nhash->export = hash->export;
	nhash->import = hash->import;
	nhash->setkey = hash->setkey;
	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_clone_shash_ops_async(nhash, hash);

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type != &crypto_ahash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != NULL;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");