xref: /linux/arch/powerpc/crypto/sha256-spe-glue.c (revision 9b838a3c32d7a1edd7edeec1bc455eca76622218)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue code for SHA-256 implementation for SPE instructions (PPC)
 *
 * Based on the generic implementation. The assembler module takes care
 * of the SPE registers so it can run from interrupt context.
 *
 * Copyright (c) 2015 Markus Stockhausen <stockhausen@collogia.de>
 */

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha2.h>
#include <asm/byteorder.h>
#include <asm/switch_to.h>
#include <linux/hardirq.h>

/*
 * MAX_BYTES defines the number of bytes that are allowed to be processed
 * between preempt_disable() and preempt_enable(). SHA-256 takes roughly
 * 2,000 operations per 64-byte block. e500 cores can issue two arithmetic
 * instructions per clock cycle using one 32/64-bit unit (SU1) and one
 * 32-bit unit (SU2). Thus 1KB of input data will need an estimated
 * maximum of 18,000 cycles, headroom for cache misses included. Even with
 * the low-end model clocked at 667 MHz this corresponds to a critical
 * time window of less than 27us.
 */
#define MAX_BYTES 1024
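
/*
 * Back-of-the-envelope check of the figures above: 2,000 ops at two per
 * cycle is about 1,000 cycles per 64-byte block; 1024 / 64 = 16 blocks
 * gives roughly 16,000 cycles, or about 18,000 with cache-miss headroom;
 * 18,000 cycles / 667 MHz is approximately 27 us.
 */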

extern void ppc_spe_sha256_transform(u32 *state, const u8 *src, u32 blocks);

static void spe_begin(void)
{
	/* We just start SPE operations and will save SPE registers later. */
	preempt_disable();
	enable_kernel_spe();
}

static void spe_end(void)
{
	disable_kernel_spe();
	/* reenable preemption */
	preempt_enable();
}

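/*
 * Wipe the hash state and buffered input once the digest has been
 * produced so that no sensitive intermediate data lingers in memory.
 * Word-sized stores keep this cheap; the BUILD_BUG_ON below guards the
 * size assumption.
 */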
static inline void ppc_sha256_clear_context(struct sha256_state *sctx)
{
	int count = sizeof(struct sha256_state) >> 2;
	u32 *ptr = (u32 *)sctx;

	/* make sure we can clear the fast way */
	BUILD_BUG_ON(sizeof(struct sha256_state) % 4);
	do { *ptr++ = 0; } while (--count);
}

static int ppc_spe_sha256_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sctx->state[0] = SHA256_H0;
	sctx->state[1] = SHA256_H1;
	sctx->state[2] = SHA256_H2;
	sctx->state[3] = SHA256_H3;
	sctx->state[4] = SHA256_H4;
	sctx->state[5] = SHA256_H5;
	sctx->state[6] = SHA256_H6;
	sctx->state[7] = SHA256_H7;
	sctx->count = 0;

	return 0;
}

static int ppc_spe_sha224_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sctx->state[0] = SHA224_H0;
	sctx->state[1] = SHA224_H1;
	sctx->state[2] = SHA224_H2;
	sctx->state[3] = SHA224_H3;
	sctx->state[4] = SHA224_H4;
	sctx->state[5] = SHA224_H5;
	sctx->state[6] = SHA224_H6;
	sctx->state[7] = SHA224_H7;
	sctx->count = 0;

	return 0;
}

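/*
 * Update works in three phases: top up and flush a partially filled
 * 64-byte buffer, push as many full blocks as possible through the SPE
 * transform in chunks of at most MAX_BYTES, and finally stash whatever
 * is left for the next call.
 */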
static int ppc_spe_sha256_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	const unsigned int offset = sctx->count & 0x3f;
	const unsigned int avail = 64 - offset;
	unsigned int bytes;
	const u8 *src = data;

	if (avail > len) {
		sctx->count += len;
		memcpy((char *)sctx->buf + offset, src, len);
		return 0;
	}

	sctx->count += len;

	if (offset) {
		memcpy((char *)sctx->buf + offset, src, avail);

		spe_begin();
		ppc_spe_sha256_transform(sctx->state, (const u8 *)sctx->buf, 1);
		spe_end();

		len -= avail;
		src += avail;
	}

	while (len > 63) {
		/* cut input data into smaller blocks */
		bytes = (len > MAX_BYTES) ? MAX_BYTES : len;
		bytes = bytes & ~0x3f;

		spe_begin();
		ppc_spe_sha256_transform(sctx->state, src, bytes >> 6);
		spe_end();

		src += bytes;
		len -= bytes;
	}

	memcpy((char *)sctx->buf, src, len);
	return 0;
}

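/*
 * Standard SHA-256 padding: append a single 0x80 byte, zero-fill up to
 * byte 56 of the final block (spilling into an extra block when there is
 * no room left for the 8-byte length field), then store the total
 * message length in bits as a 64-bit big-endian value in the last eight
 * bytes and run the final transform.
 */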
static int ppc_spe_sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	const unsigned int offset = sctx->count & 0x3f;
	char *p = (char *)sctx->buf + offset;
	int padlen;
	__be64 *pbits = (__be64 *)(((char *)&sctx->buf) + 56);
	__be32 *dst = (__be32 *)out;

	padlen = 55 - offset;
	*p++ = 0x80;

	spe_begin();

	if (padlen < 0) {
		memset(p, 0x00, padlen + sizeof(u64));
		ppc_spe_sha256_transform(sctx->state, sctx->buf, 1);
		p = (char *)sctx->buf;
		padlen = 56;
	}

	memset(p, 0, padlen);
	*pbits = cpu_to_be64(sctx->count << 3);
	ppc_spe_sha256_transform(sctx->state, sctx->buf, 1);

	spe_end();

	dst[0] = cpu_to_be32(sctx->state[0]);
	dst[1] = cpu_to_be32(sctx->state[1]);
	dst[2] = cpu_to_be32(sctx->state[2]);
	dst[3] = cpu_to_be32(sctx->state[3]);
	dst[4] = cpu_to_be32(sctx->state[4]);
	dst[5] = cpu_to_be32(sctx->state[5]);
	dst[6] = cpu_to_be32(sctx->state[6]);
	dst[7] = cpu_to_be32(sctx->state[7]);

	ppc_sha256_clear_context(sctx);
	return 0;
}

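/*
 * SHA-224 shares the SHA-256 compression function and differs only in
 * its initial values and in truncating the output to the first 224 bits,
 * so the final step runs the SHA-256 code into a scratch buffer and
 * copies out the first seven words.
 */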
static int ppc_spe_sha224_final(struct shash_desc *desc, u8 *out)
{
	__be32 D[SHA256_DIGEST_SIZE >> 2];
	__be32 *dst = (__be32 *)out;

	ppc_spe_sha256_final(desc, (u8 *)D);

	/* avoid bytewise memcpy */
	dst[0] = D[0];
	dst[1] = D[1];
	dst[2] = D[2];
	dst[3] = D[3];
	dst[4] = D[4];
	dst[5] = D[5];
	dst[6] = D[6];

	/* clear sensitive data */
	memzero_explicit(D, SHA256_DIGEST_SIZE);
	return 0;
}

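/*
 * Export/import simply copy the raw sha256_state so that a partially
 * hashed message can be saved and resumed later, possibly by a different
 * sha256 implementation that uses the same state layout.
 */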
static int ppc_spe_sha256_export(struct shash_desc *desc, void *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, sizeof(*sctx));
	return 0;
}

static int ppc_spe_sha256_import(struct shash_desc *desc, const void *in)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, sizeof(*sctx));
	return 0;
}

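/*
 * Both algorithms register with cra_priority 300 so the crypto core
 * prefers them over the lower-priority generic C implementations when a
 * caller asks for "sha256" or "sha224" by name.
 */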
static struct shash_alg algs[2] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	ppc_spe_sha256_init,
	.update		=	ppc_spe_sha256_update,
	.final		=	ppc_spe_sha256_final,
	.export		=	ppc_spe_sha256_export,
	.import		=	ppc_spe_sha256_import,
	.descsize	=	sizeof(struct sha256_state),
	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name=	"sha256-ppc-spe",
		.cra_priority	=	300,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	ppc_spe_sha224_init,
	.update		=	ppc_spe_sha256_update,
	.final		=	ppc_spe_sha224_final,
	.export		=	ppc_spe_sha256_export,
	.import		=	ppc_spe_sha256_import,
	.descsize	=	sizeof(struct sha256_state),
	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name=	"sha224-ppc-spe",
		.cra_priority	=	300,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

static int __init ppc_spe_sha256_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit ppc_spe_sha256_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(ppc_spe_sha256_mod_init);
module_exit(ppc_spe_sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm, SPE optimized");

MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ppc-spe");
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ppc-spe");
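
/*
 * Usage sketch (illustrative only, error handling omitted): a kernel
 * caller reaches this implementation through the generic shash API,
 * roughly:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_digest(desc, data, len, digest);
 *	crypto_free_shash(tfm);
 *
 * With this module loaded, "sha256" resolves to "sha256-ppc-spe" on
 * SPE-capable cores because of its higher priority.
 */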