xref: /linux/crypto/shash.c (revision d3514abcf5b896a3a66d8b7c960a0018a52ebc2c)
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

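/*
 * setkey helper for keys that do not satisfy the algorithm's alignmask:
 * copy the key into a temporary, suitably aligned buffer, run ->setkey()
 * on that copy and then wipe the copy with kzfree().
 */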
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

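/*
 * Dispatch to the bounce-buffer helper above whenever the caller's key
 * pointer violates the algorithm's alignment requirement.
 */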
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

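/*
 * Handle an update with a misaligned data pointer: hash just enough
 * leading bytes through an aligned on-stack buffer to realign the
 * pointer, then pass the (now aligned) remainder straight through.
 */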
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

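/*
 * Handle final() when the output buffer is misaligned: let the
 * algorithm write the digest into an aligned on-stack buffer, copy it
 * to the caller's buffer and then clear the temporary copy.
 */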
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

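/*
 * Generic fallbacks for ->finup() and ->digest(): finup is simply
 * update followed by final, and digest is init followed by finup.
 * The same helpers double as the unaligned paths taken by
 * crypto_shash_finup() and crypto_shash_digest() below.
 */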
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

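/*
 * Default export/import: the partial hash state is just the raw
 * descriptor context, so a plain memcpy of descsize bytes suffices.
 */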
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

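/*
 * The shash_async_* functions below expose an shash algorithm through
 * the asynchronous ahash interface.  The ahash transform context holds
 * a pointer to the underlying crypto_shash, and the request context is
 * used as the shash_desc.
 */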
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

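/*
 * Feed a scatterlist-based ahash request into an shash descriptor by
 * walking the request chunk by chunk and calling crypto_shash_update()
 * on each mapped piece.
 */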
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

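/*
 * finup over a scatterlist: hash all but the last chunk with update()
 * and fold the final chunk in with crypto_shash_finup() so that the
 * digest lands directly in req->result.
 */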
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

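/*
 * digest over a scatterlist: if the data lies entirely within a single
 * page of the first scatterlist element it is hashed directly from an
 * atomic kmap of that page; otherwise fall back to init() followed by
 * the chunked finup path above.
 */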
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

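/*
 * Set up an ahash transform backed by this shash algorithm: allocate
 * the underlying shash tfm, stash it in the ahash context and wire up
 * the shash_async_* operations.  The request size accounts for the
 * shash_desc plus the algorithm's descriptor context.
 */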
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;

	if (alg->setkey)
		crt->setkey = shash_async_setkey;
	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

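/*
 * The shash_compat_* functions adapt an shash algorithm to the legacy
 * crypto_hash interface.  Here the transform context holds a pointer
 * to a heap-allocated shash_desc shared by all operations on the
 * transform.
 */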
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final  = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "shash");
	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

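/*
 * Type glue that ties shash into the crypto API core: context sizing,
 * tfm initialisation, /proc/crypto output and the netlink report used
 * by crypto_user.
 */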
static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

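/*
 * Allocate a synchronous hash transform by algorithm name.
 *
 * Typical caller usage (an illustrative sketch, not taken from this
 * file; "sha256", the local variables and the caller-supplied data/len
 * buffer are only examples):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *	u8 digest[32];
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	if (!desc) {
 *		crypto_free_shash(tfm);
 *		return -ENOMEM;
 *	}
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *
 *	crypto_shash_init(desc);
 *	crypto_shash_update(desc, data, len);
 *	crypto_shash_final(desc, digest);
 *
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */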
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

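/*
 * Validate an shash_alg and fill in the optional callbacks before
 * registration: missing finup/digest fall back to the generic helpers
 * above, a missing export/import pair defaults to raw descriptor
 * copies (with statesize = descsize), and a missing setkey becomes a
 * stub that returns -ENOSYS.
 */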
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");