/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;
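
/*
 * Default ->setkey() for algorithms that do not take a key: reject any
 * attempt to set one.
 */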
static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}
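
/*
 * The key buffer violates the algorithm's alignment requirement, so bounce
 * it through a kmalloc'd buffer aligned to the tfm's alignmask.  The copy
 * is freed with kzfree() so that key material does not linger in memory.
 */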
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}
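
/*
 * Hash just enough bytes through an aligned on-stack bounce buffer to bring
 * the data pointer up to the required alignment, then pass the now-aligned
 * remainder straight to ->update().
 */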
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
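
/*
 * The caller's result buffer is unaligned: let ->final() write the digest
 * into an aligned on-stack buffer, copy it out, then wipe the temporary.
 */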
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
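
/*
 * Minimal usage sketch for the synchronous hash API above (illustrative
 * only, not part of this file; "sha1" and the caller-supplied data, len
 * and out are assumptions for the example).  A shash_desc must be followed
 * by crypto_shash_descsize() bytes of operation state:
 *
 *	struct crypto_shash *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha1", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	{
 *		struct {
 *			struct shash_desc shash;
 *			char ctx[crypto_shash_descsize(tfm)];
 *		} desc;
 *
 *		desc.shash.tfm = tfm;
 *		desc.shash.flags = 0;	(or CRYPTO_TFM_REQ_MAY_SLEEP)
 *		err = crypto_shash_digest(&desc.shash, data, len, out);
 *	}
 *
 *	crypto_free_shash(tfm);
 *	return err;
 */
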
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
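
/*
 * The shash_async_* helpers below expose an shash algorithm through the
 * asynchronous ahash interface.  The ahash tfm context holds a pointer to
 * the underlying shash tfm, and the ahash request context holds the
 * shash_desc (see crypto_init_shash_ops_async() below).
 */
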
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}
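
/*
 * Walk the request's scatterlist and feed each mapped segment to
 * crypto_shash_update().  crypto_hash_walk_done() propagates a negative
 * return from the update step, so errors fall out of the loop unchanged.
 */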
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
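
/*
 * Like shash_ahash_update(), but the last segment of the walk is folded
 * into ->finup() so that a single-segment request costs only one call
 * into the algorithm.
 */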
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}
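
/*
 * Fast path: when the whole request fits inside the first scatterlist
 * entry without crossing a page, hash it directly from the kmapped page.
 * Otherwise fall back to init + finup over a scatterlist walk.
 */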
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		void *data;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	return crypto_shash_import(ahash_request_ctx(req), in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
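
/*
 * Set up the ahash wrapper: grab a reference to the underlying algorithm,
 * create the shash tfm, and point the ahash entry points at the
 * shash_async_* helpers.  Optional hooks are wired up only when the
 * algorithm provides them.
 */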
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;

	if (alg->setkey)
		crt->setkey = shash_async_setkey;
	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
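
/*
 * The shash_compat_* helpers below adapt an shash algorithm to the legacy
 * crypto_hash interface.  Here the tfm context holds a pointer to a
 * kmalloc'd shash_desc that is reused across operations.
 */
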
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final  = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
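
/*
 * Fill in default operations and sanity-check the advertised sizes before
 * an shash algorithm is registered, either directly or as part of a
 * template instance.
 */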
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");