/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

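/*
 * Bounce an unaligned key through a kmalloc'd buffer that satisfies the
 * algorithm's alignment mask before handing it to the ->setkey() hook.
 */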
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}

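/*
 * Set the key for a shash transform, taking the copying slow path only
 * when the caller's key pointer is not suitably aligned.
 */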
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (!shash->setkey)
		return -ENOSYS;

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

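/*
 * Size an on-stack bounce buffer so that it can be realigned to @mask,
 * given the default alignment of a u8 array declared with
 * __attribute__ ((aligned)).
 */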
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

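/*
 * Hash the misaligned head of @data via an aligned on-stack copy, then
 * feed the remaining, now aligned, bytes through directly.
 */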
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	/*
	 * A short input may end before the next alignment boundary; do
	 * not copy or hash past the end of it.
	 */
	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

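/*
 * Write the digest to an aligned stack buffer first, then copy it out to
 * the caller's unaligned destination.
 */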
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

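/* Generic finup fallback: a plain update followed by a final. */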
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

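/* Generic digest fallback: init, update and final in sequence. */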
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

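/*
 * Restore previously exported descriptor state; algorithms that provide a
 * ->reinit() hook get a chance to fix up internal state afterwards.
 */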
int crypto_shash_import(struct shash_desc *desc, const u8 *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *alg = crypto_shash_alg(tfm);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));

	if (alg->reinit)
		alg->reinit(desc);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);

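/*
 * The shash_async_* helpers below wrap a synchronous shash algorithm so
 * that it can be driven through the asynchronous ahash interface.
 */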
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

static int shash_async_update(struct ahash_request *req)
{
	struct shash_desc *desc = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

static int shash_async_digest(struct ahash_request *req)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);
		void *data;

		desc->tfm = *ctx;
		desc->flags = req->base.flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_async_init(req);
	if (err)
		goto out;

	err = shash_async_update(req);
	if (err)
		goto out;

	err = shash_async_final(req);

out:
	return err;
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct ahash_tfm *crt = &tfm->crt_ahash;
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final  = shash_async_final;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->digestsize = alg->digestsize;
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

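/*
 * The shash_compat_* helpers below expose a shash algorithm through the
 * legacy crypto_hash interface.
 */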
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc *desc = crypto_hash_ctx(tfm);

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc *desc = crypto_tfm_ctx(tfm);

	crypto_free_shash(desc->tfm);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc *desc = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final  = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

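/*
 * Instantiate the wrapper matching the interface the user requested:
 * the legacy hash compat layer or the asynchronous ahash layer.
 */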
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return crypto_init_shash_ops_async(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc) + salg->descsize;
	case CRYPTO_ALG_TYPE_AHASH_MASK:
		return sizeof(struct crypto_shash *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
	seq_printf(m, "descsize     : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

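/*
 * Allocate a synchronous hash transform by algorithm name.  A minimal
 * usage sketch (error handling elided; "sha1" is only an example name):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
 *	struct shash_desc *desc = kmalloc(sizeof(*desc) +
 *					  crypto_shash_descsize(tfm),
 *					  GFP_KERNEL);
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, out);
 *
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */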
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

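/*
 * Register a shash algorithm, rejecting digest and descriptor sizes
 * above PAGE_SIZE / 8 before stamping in the shash type and flags.
 */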
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");