// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto-API module for CRC-32 algorithms implemented with the
 * z/Architecture Vector Extension Facility.
 *
 * Copyright IBM Corp. 2015
 * Author(s): Hendrik Brueckner <brueckner@linux.vnet.ibm.com>
 */
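
/*
 * Usage sketch (illustrative): the algorithms below are reached through
 * the generic shash API; buf/buflen stand for the caller's data and error
 * handling is omitted:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("crc32-vx", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[CRC32_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_digest(desc, buf, buflen, digest);
 *	crypto_free_shash(tfm);
 */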
#define KMSG_COMPONENT	"crc32-vx"
#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt

#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <crypto/internal/hash.h>
#include <asm/fpu.h>
#include "crc32-vx.h"

#define CRC32_BLOCK_SIZE	1
#define CRC32_DIGEST_SIZE	4

#define VX_MIN_LEN		64
#define VX_ALIGNMENT		16L
#define VX_ALIGN_MASK		(VX_ALIGNMENT - 1)

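/*
 * crc_ctx holds the transform-wide CRC seed (set via setkey() or one of
 * the cra_init callbacks below); crc_desc_ctx holds the running CRC value
 * of a single hash request.
 */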
struct crc_ctx {
	u32 key;
};

struct crc_desc_ctx {
	u32 crc;
};

/*
 * DEFINE_CRC32_VX() - Define a CRC-32 function using the vector extension
 *
 * Creates a function to perform a particular CRC-32 computation. Depending
 * on the message buffer, either the hardware-accelerated or the software
 * implementation is used.  Note that the message buffer is aligned to
 * improve fetch operations of VECTOR LOAD MULTIPLE instructions.
 *
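 * For example (illustrative): for a buffer whose address ends in 0x9 and
 * datalen = 100, prealign = 16 - 9 = 7 bytes are handled by the software
 * fallback, aligned = 80 bytes by the vector implementation, and the
 * remaining 13 bytes by the fallback again.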
 */
#define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw)		    \
	static u32 __pure ___fname(u32 crc,				    \
				unsigned char const *data, size_t datalen)  \
	{								    \
		unsigned long prealign, aligned, remaining;		    \
		DECLARE_KERNEL_FPU_ONSTACK16(vxstate);			    \
									    \
		if (datalen < VX_MIN_LEN + VX_ALIGN_MASK)		    \
			return ___crc32_sw(crc, data, datalen);		    \
									    \
		if ((unsigned long)data & VX_ALIGN_MASK) {		    \
			prealign = VX_ALIGNMENT -			    \
				  ((unsigned long)data & VX_ALIGN_MASK);    \
			datalen -= prealign;				    \
			crc = ___crc32_sw(crc, data, prealign);		    \
			data = (void *)((unsigned long)data + prealign);    \
		}							    \
									    \
		aligned = datalen & ~VX_ALIGN_MASK;			    \
		remaining = datalen & VX_ALIGN_MASK;			    \
									    \
		kernel_fpu_begin(&vxstate, KERNEL_VXR_LOW);		    \
		crc = ___crc32_vx(crc, data, aligned);			    \
		kernel_fpu_end(&vxstate, KERNEL_VXR_LOW);		    \
									    \
		if (remaining)						    \
			crc = ___crc32_sw(crc, data + aligned, remaining);  \
									    \
		return crc;						    \
	}
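
/*
 * The three instantiations below generate crc32_le_vx(), crc32_be_vx() and
 * crc32c_le_vx(), each pairing a vector-accelerated implementation from
 * crc32-vx.h with the matching software fallback from <linux/crc32.h>.
 */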
DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)

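/*
 * The cra_init callbacks seed the CRC start value: zero for the crc32
 * algorithms, all-ones (~0) for crc32c, matching the default seeds of the
 * generic crc32 and crc32c shash implementations.
 */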
static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = 0;
	return 0;
}

static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = ~0;
	return 0;
}

static int crc32_vx_init(struct shash_desc *desc)
{
	struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->crc = mctx->key;
	return 0;
}

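/*
 * setkey() (re)sets the 4-byte CRC seed; it is interpreted as a
 * little-endian value here and as a big-endian value in the crc32be
 * variant below.
 */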
static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			   unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key))
		return -EINVAL;
	mctx->key = le32_to_cpu(*(__le32 *)newkey);
	return 0;
}

static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			     unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key))
		return -EINVAL;
	mctx->key = be32_to_cpu(*(__be32 *)newkey);
	return 0;
}

static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__le32 *)out = cpu_to_le32p(&ctx->crc);
	return 0;
}

static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__be32 *)out = cpu_to_be32p(&ctx->crc);
	return 0;
}

static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
	return 0;
}

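/*
 * The __*_vx_finup() helpers compute the final digest over (data, len)
 * starting from *crc and store it in the requested byte order; the crc32c
 * variant additionally applies the final XOR, as above.
 */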
static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
	return 0;
}

static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
	return 0;
}

static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			     u8 *out)
{
	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
	return 0;
}

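/*
 * Generate the shash .finup, .digest and .update callbacks. finup and
 * digest share the __*_vx_finup() helpers above: finup continues from the
 * per-request state in shash_desc_ctx(), while digest starts from the
 * seed stored in the transform context.
 */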
#define CRC32_VX_FINUP(alg, func)					      \
	static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data, \
				    unsigned int datalen, u8 *out)	      \
	{								      \
		return __ ## alg ## _vx_finup(shash_desc_ctx(desc),	      \
					      data, datalen, out);	      \
	}

CRC32_VX_FINUP(crc32le, crc32_le_vx)
CRC32_VX_FINUP(crc32be, crc32_be_vx)
CRC32_VX_FINUP(crc32c, crc32c_le_vx)

#define CRC32_VX_DIGEST(alg, func)					      \
	static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
				     unsigned int len, u8 *out)		      \
	{								      \
		return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm),    \
					      data, len, out);		      \
	}

CRC32_VX_DIGEST(crc32le, crc32_le_vx)
CRC32_VX_DIGEST(crc32be, crc32_be_vx)
CRC32_VX_DIGEST(crc32c, crc32c_le_vx)

#define CRC32_VX_UPDATE(alg, func)					      \
	static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
				     unsigned int datalen)		      \
	{								      \
		struct crc_desc_ctx *ctx = shash_desc_ctx(desc);	      \
		ctx->crc = func(ctx->crc, data, datalen);		      \
		return 0;						      \
	}

CRC32_VX_UPDATE(crc32le, crc32_le_vx)
CRC32_VX_UPDATE(crc32be, crc32_be_vx)
CRC32_VX_UPDATE(crc32c, crc32c_le_vx)

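/*
 * cra_priority 200 ranks these implementations above the generic C
 * versions (priority 100), so they are preferred whenever this module is
 * loaded. CRYPTO_ALG_OPTIONAL_KEY marks setkey() as optional: without a
 * key, the seed installed by cra_init is used.
 */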
static struct shash_alg crc32_vx_algs[] = {
	/* CRC-32 LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32le_vx_update,
		.final		=	crc32le_vx_final,
		.finup		=	crc32le_vx_finup,
		.digest		=	crc32le_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32",
			.cra_driver_name = "crc32-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32 BE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32be_vx_setkey,
		.update		=	crc32be_vx_update,
		.final		=	crc32be_vx_final,
		.finup		=	crc32be_vx_finup,
		.digest		=	crc32be_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32be",
			.cra_driver_name = "crc32be-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32C LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32c_vx_update,
		.final		=	crc32c_vx_final,
		.finup		=	crc32c_vx_finup,
		.digest		=	crc32c_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32c",
			.cra_driver_name = "crc32c-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_invert,
		},
	},
};

static int __init crc_vx_mod_init(void)
{
	return crypto_register_shashes(crc32_vx_algs,
				       ARRAY_SIZE(crc32_vx_algs));
}

static void __exit crc_vx_mod_exit(void)
{
	crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
}

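/*
 * Register only on machines that provide the vector facility (VXRS);
 * module_cpu_feature_match() also emits the CPU-feature module alias so
 * the module is auto-loaded on matching hardware.
 */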
module_cpu_feature_match(S390_CPU_FEATURE_VXRS, crc_vx_mod_init);
module_exit(crc_vx_mod_exit);

MODULE_AUTHOR("Hendrik Brueckner <brueckner@linux.vnet.ibm.com>");
MODULE_DESCRIPTION("CRC-32 algorithms using z/Architecture Vector Extension Facility");
MODULE_LICENSE("GPL");

MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32-vx");
MODULE_ALIAS_CRYPTO("crc32c");
MODULE_ALIAS_CRYPTO("crc32c-vx");