// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

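/*
 * The core transform is implemented in assembly, generated from the
 * OpenSSL armv8 perlasm source. It consumes the input in whole 64-byte
 * blocks and updates the eight 32-bit state words in place. It is
 * exported so that other arm64 SHA-256 drivers (such as the Crypto
 * Extensions glue) can fall back to it when NEON must not be used.
 */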
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);

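/*
 * Trivial wrapper adapting the assembly prototype to the block-function
 * signature expected by the sha256_base_do_update()/do_finalize()
 * helpers.
 */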
static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

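/*
 * NEON implementation of the block transform. It clobbers the NEON
 * register file and must therefore only be called between
 * kernel_neon_begin() and kernel_neon_end().
 */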
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	sha256_block_neon(sst->state, src, blocks);
}

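/*
 * Scalar update path: the generic assembly routine uses only general
 * purpose registers, so it is safe to call from any context without
 * touching the NEON state.
 */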
static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	return sha256_base_do_update(desc, data, len,
				     __sha256_block_data_order);
}

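/* Hash any trailing data, then pad the message and emit the digest. */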
static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	if (len)
		sha256_base_do_update(desc, data, len,
				      __sha256_block_data_order);
	sha256_base_do_finalize(desc, __sha256_block_data_order);

	return sha256_base_finish(desc, out);
}

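/* final() is simply finup() with no trailing data. */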
static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}

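/*
 * Always-available scalar algorithms. Priority 125 places them above the
 * generic C implementation (priority 100) but below the NEON and Crypto
 * Extensions drivers, so the crypto core only selects them when nothing
 * faster is registered. Callers pick an implementation indirectly via
 * the generic API, e.g. crypto_alloc_shash("sha256", 0, 0).
 */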
static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

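/*
 * NEON update path. When the SIMD unit may not be used (e.g. in hard
 * interrupt context), fall back to the scalar transform instead of
 * deferring the request.
 */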
static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
		kernel_neon_end();
		data += chunk;
		len -= chunk;
	}
	return 0;
}

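/*
 * As above: finalize with NEON when the SIMD unit is usable, otherwise
 * do everything with the scalar transform.
 */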
static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc, __sha256_block_neon);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}

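/*
 * NEON-accelerated algorithms, registered in addition to the scalar ones
 * when Advanced SIMD is present. The higher priority (150) makes them
 * the default on such systems unless the Crypto Extensions driver is
 * also loaded.
 */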
static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

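/*
 * Register the scalar algorithms unconditionally, and the NEON variants
 * only on CPUs that implement Advanced SIMD; unwind the scalar
 * registration if the second step fails.
 */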
static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));
	if (ret)
		return ret;

	if (cpu_have_named_feature(ASIMD)) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

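/* Unregister in the reverse order of registration. */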
static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);