// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8

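/*
 * 8-way parallel cipher routines, implemented in AVX assembler
 * (arch/x86/crypto/cast6-avx-x86_64-asm_64.S). Each call processes
 * CAST6_PARALLEL_BLOCKS blocks in one go.
 */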
asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
			       le128 *iv);

asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

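/* Adapt the skcipher setkey interface to the cipher-level cast6_setkey(). */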
static int cast6_setkey_skcipher(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	return cast6_setkey(&tfm->base, key, keylen);
}

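/*
 * Single-block XTS helpers, used as the one-block fallback entries in the
 * dispatch tables below; glue_xts_crypt_128bit_one() applies the tweak
 * around the plain ECB primitive.
 */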
static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_encrypt));
}

static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_decrypt));
}

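/*
 * One-block CTR fallback: the counter is kept little-endian by the glue
 * helper, so convert it to the cipher's big-endian block format, encrypt
 * it, and XOR the resulting keystream block into dst.
 */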
static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}

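/*
 * Dispatch tables for the glue helper. Entries are tried in order: the
 * 8-way AVX routines run while at least CAST6_PARALLEL_BLOCKS blocks
 * remain, and the one-block C implementations handle the tail.
 * fpu_blocks_limit tells the helper how many blocks make claiming the
 * FPU worthwhile.
 */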
static const struct common_glue_ctx cast6_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
	} }
};

static const struct common_glue_ctx cast6_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
	} }
};

static const struct common_glue_ctx cast6_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
	} }
};

static const struct common_glue_ctx cast6_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
	} }
};

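/* skcipher request handlers, thin wrappers around the glue helper. */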
static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&cast6_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&cast6_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__cast6_encrypt),
					   req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&cast6_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&cast6_ctr, req);
}

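/* XTS needs two independent key schedules: one for the tweak, one for data. */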
struct cast6_xts_ctx {
	struct cast6_ctx tweak_ctx;
	struct cast6_ctx crypt_ctx;
};

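/*
 * Split the XTS key into its two halves. xts_verify_key() rejects keys of
 * odd length and, when weak keys are forbidden (e.g. in FIPS mode), keys
 * whose two halves are identical.
 */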
static int xts_cast6_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	/* first half of xts-key is for crypt */
	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
			      flags);
}

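/*
 * The XTS construction always encrypts the tweak, even on the decryption
 * path, which is why both handlers pass __cast6_encrypt as the tweak
 * function.
 */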
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&cast6_enc_xts, req,
				   XTS_TWEAK_CAST(__cast6_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&cast6_dec_xts, req,
				   XTS_TWEAK_CAST(__cast6_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
}

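/*
 * The "__" name prefix together with CRYPTO_ALG_INTERNAL marks these
 * algorithms as internal-only: they use FPU/SIMD state and must not be
 * called from arbitrary context. Users are given the simd wrappers
 * registered in cast6_init(), which fall back to a cryptd thread when
 * the FPU is not usable.
 */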
static struct skcipher_alg cast6_algs[] = {
	{
		.base.cra_name		= "__ecb(cast6)",
		.base.cra_driver_name	= "__ecb-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(cast6)",
		.base.cra_driver_name	= "__cbc-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.ivsize			= CAST6_BLOCK_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "__ctr(cast6)",
		.base.cra_driver_name	= "__ctr-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.ivsize			= CAST6_BLOCK_SIZE,
		.chunksize		= CAST6_BLOCK_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		.base.cra_name		= "__xts(cast6)",
		.base.cra_driver_name	= "__xts-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * CAST6_MIN_KEY_SIZE,
		.max_keysize		= 2 * CAST6_MAX_KEY_SIZE,
		.ivsize			= CAST6_BLOCK_SIZE,
		.setkey			= xts_cast6_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};

static struct simd_skcipher_alg *cast6_simd_algs[ARRAY_SIZE(cast6_algs)];

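/*
 * The AVX code needs both SSE and YMM state enabled in XCR0; on failure
 * cpu_has_xfeatures() reports the name of the missing feature.
 */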
static int __init cast6_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(cast6_algs,
					      ARRAY_SIZE(cast6_algs),
					      cast6_simd_algs);
}

static void __exit cast6_exit(void)
{
	simd_unregister_skciphers(cast6_algs, ARRAY_SIZE(cast6_algs),
				  cast6_simd_algs);
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");
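/*
 * Hypothetical usage sketch (illustrative only, not part of this file):
 * other kernel code requests the cipher through the generic crypto API
 * and, priority permitting, is served by this implementation via its
 * simd wrapper. "key" and "err" below are assumed locals.
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(cast6)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, 2 * CAST6_MAX_KEY_SIZE);
 *	...
 *	crypto_free_skcipher(tfm);
 */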