xref: /linux/arch/arm/crypto/sha256_glue.c (revision 17cfcb68af3bc7d5e8ae08779b1853310a2949f3)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue code for the SHA256 Secure Hash Algorithm assembly implementation
 * using optimized ARM assembler and NEON instructions.
 *
 * Copyright © 2015 Google Inc.
 *
 * This file is based on sha256_ssse3_glue.c:
 *   Copyright (C) 2013 Intel Corporation
 *   Author: Tim Chen <tim.c.chen@linux.intel.com>
 */

#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <linux/string.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/simd.h>
#include <asm/neon.h>

#include "sha256_glue.h"

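/*
 * Note (added for exposition): the block transform itself lives in the
 * accompanying ARM assembly (sha256-core.S, generated from
 * sha256-armv4.pl); the declaration below is its C-visible entry point.
 * It compresses @num_blks consecutive SHA256_BLOCK_SIZE-byte blocks from
 * @data into the eight-word @digest state.
 */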
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);

int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	/* make sure casting to sha256_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	return sha256_base_do_update(desc, data, len,
				(sha256_block_fn *)sha256_block_data_order);
}
EXPORT_SYMBOL(crypto_sha256_arm_update);

static int crypto_sha256_arm_final(struct shash_desc *desc, u8 *out)
{
	sha256_base_do_finalize(desc,
				(sha256_block_fn *)sha256_block_data_order);
	return sha256_base_finish(desc, out);
}

int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	sha256_base_do_update(desc, data, len,
			      (sha256_block_fn *)sha256_block_data_order);
	return crypto_sha256_arm_final(desc, out);
}
EXPORT_SYMBOL(crypto_sha256_arm_finup);

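/*
 * Note (added for exposition): two shash algorithms are exposed from the
 * one assembly transform.  "sha256" and "sha224" differ only in their
 * initial state and digest size, which sha256_base_init()/
 * sha224_base_init() and the digestsize fields below handle.  Priority
 * 150 ranks these drivers above the generic C implementation
 * (priority 100).
 */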
static struct shash_alg algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

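/*
 * Note (added for exposition): module init registers the scalar assembly
 * variants unconditionally, then the NEON variants (sha256_neon_algs,
 * declared in sha256_glue.h) only when the kernel supports kernel-mode
 * NEON and the CPU has it.  If the second registration fails, the first
 * is rolled back so the module is never left half-registered.
 */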
static int __init sha256_mod_init(void)
{
	int res = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (res < 0)
		return res;

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon()) {
		res = crypto_register_shashes(sha256_neon_algs,
					      ARRAY_SIZE(sha256_neon_algs));

		if (res < 0)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}

	return res;
}

static void __exit sha256_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon())
		crypto_unregister_shashes(sha256_neon_algs,
					  ARRAY_SIZE(sha256_neon_algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm (ARM), including NEON");

MODULE_ALIAS_CRYPTO("sha256");
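
/*
 * Usage note (illustrative sketch, not part of the original file):
 * in-kernel callers do not invoke the exported helpers above directly;
 * they go through the shash API, and the crypto core selects the
 * highest-priority "sha256" implementation available, e.g.:
 *
 *	struct crypto_shash *tfm;
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		// data/len are the caller's message buffer and length
 *		err = crypto_shash_digest(desc, data, len, digest);
 *	}
 *	crypto_free_shash(tfm);
 */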