xref: /linux/arch/x86/crypto/twofish_glue_3way.c (revision 41966710ab574f1fcedf3e10e1ceef911c096d1d)
/*
 * Glue Code for 3-way parallel assembler optimized version of Twofish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/b128ops.h>

/* regular block cipher functions from twofish_x86_64 module */
asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst,
				const u8 *src);
asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst,
				const u8 *src);

/* 3-way parallel cipher functions */
asmlinkage void __twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
				       const u8 *src, bool xor);
asmlinkage void twofish_dec_blk_3way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);

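/*
 * The 3-way assembler routine either writes the ciphertext to dst or XORs
 * it into whatever dst already holds, depending on the 'xor' flag.  The
 * wrappers below pin that flag: the plain variant is used for ECB, the XOR
 * variant for CTR keystream generation.
 */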
static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__twofish_enc_blk_3way(ctx, dst, src, false);
}

static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst,
					    const u8 *src)
{
	__twofish_enc_blk_3way(ctx, dst, src, true);
}

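/*
 * Shared ECB walk: map each segment returned by the blkcipher walk, push
 * as many three-block batches as possible through the 3-way routine, then
 * finish any remaining whole blocks with the single-block routine.
 */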
static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
		     void (*fn)(struct twofish_ctx *, u8 *, const u8 *),
		     void (*fn_3way)(struct twofish_ctx *, u8 *, const u8 *))
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes;
	int err;

	err = blkcipher_walk_virt(desc, walk);

	while ((nbytes = walk->nbytes)) {
		u8 *wsrc = walk->src.virt.addr;
		u8 *wdst = walk->dst.virt.addr;

		/* Process three block batch */
		if (nbytes >= bsize * 3) {
			do {
				fn_3way(ctx, wdst, wsrc);

				wsrc += bsize * 3;
				wdst += bsize * 3;
				nbytes -= bsize * 3;
			} while (nbytes >= bsize * 3);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			fn(ctx, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		err = blkcipher_walk_done(desc, walk, nbytes);
	}

	return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, twofish_enc_blk, twofish_enc_blk_3way);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, twofish_dec_blk, twofish_dec_blk_3way);
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "ecb(twofish)",
	.cra_driver_name	= "ecb-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= TF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct twofish_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE,
			.max_keysize	= TF_MAX_KEY_SIZE,
			.setkey		= twofish_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};

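/*
 * CBC encryption is inherently serial: each plaintext block must be XORed
 * with the previous ciphertext block before it can be encrypted, so only
 * the single-block routine is usable here.  The last ciphertext block of
 * the segment is stored back into walk->iv to chain into the next segment.
 */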
static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 *iv = (u128 *)walk->iv;

	do {
		u128_xor(dst, src, iv);
		twofish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	*(u128 *)walk->iv = *iv;
	return nbytes;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_encrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

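/*
 * CBC decryption parallelizes, since every ciphertext block needed for the
 * final XOR is already available.  The segment is processed backwards from
 * the last block so that in-place operation (dst == src) does not overwrite
 * ciphertext blocks that are still needed as chaining values; ivs[] keeps
 * copies of the two blocks consumed by each 3-way call.
 */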
static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 ivs[3 - 1];
	u128 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			nbytes -= bsize * (3 - 1);
			src -= 3 - 1;
			dst -= 3 - 1;

			ivs[0] = src[0];
			ivs[1] = src[1];

			twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);

			u128_xor(dst + 1, dst + 1, ivs + 0);
			u128_xor(dst + 2, dst + 2, ivs + 1);

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			u128_xor(dst, dst, src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	for (;;) {
		twofish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		u128_xor(dst, dst, src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	u128_xor(dst, dst, (u128 *)walk->iv);
	*(u128 *)walk->iv = last_iv;

	return nbytes;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_decrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "cbc(twofish)",
	.cra_driver_name	= "cbc-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= TF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct twofish_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE,
			.max_keysize	= TF_MAX_KEY_SIZE,
			.ivsize		= TF_BLOCK_SIZE,
			.setkey		= twofish_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};

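/*
 * CTR helpers: the counter is kept as a host-endian u128 so it can be
 * incremented with ordinary integer arithmetic, and is converted to a
 * big-endian be128 block right before encryption.  u128_inc() carries from
 * the low 64 bits (b) into the high 64 bits (a).
 */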
static inline void u128_to_be128(be128 *dst, const u128 *src)
{
	dst->a = cpu_to_be64(src->a);
	dst->b = cpu_to_be64(src->b);
}

static inline void be128_to_u128(u128 *dst, const be128 *src)
{
	dst->a = be64_to_cpu(src->a);
	dst->b = be64_to_cpu(src->b);
}

static inline void u128_inc(u128 *i)
{
	i->b++;
	if (!i->b)
		i->a++;
}

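/*
 * Final partial block in CTR mode: encrypt the counter into a keystream
 * buffer, XOR only the remaining bytes into the output and advance the
 * counter in walk->iv.
 */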
static void ctr_crypt_final(struct blkcipher_desc *desc,
			    struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 *ctrblk = walk->iv;
	u8 keystream[TF_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	twofish_enc_blk(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	crypto_inc(ctrblk, TF_BLOCK_SIZE);
}

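/*
 * Full-block CTR processing.  For each three-block batch the plaintext is
 * first copied to dst (when not operating in place), three consecutive
 * counter values are laid out in ctrblocks[], and the XOR variant of the
 * 3-way routine encrypts the counters and XORs the keystream into dst.
 * Leftover blocks fall back to the single-block routine.
 */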
static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 ctrblk;
	be128 ctrblocks[3];

	be128_to_u128(&ctrblk, (be128 *)walk->iv);

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			if (dst != src) {
				dst[0] = src[0];
				dst[1] = src[1];
				dst[2] = src[2];
			}

			/* create ctrblks for parallel encrypt */
			u128_to_be128(&ctrblocks[0], &ctrblk);
			u128_inc(&ctrblk);
			u128_to_be128(&ctrblocks[1], &ctrblk);
			u128_inc(&ctrblk);
			u128_to_be128(&ctrblocks[2], &ctrblk);
			u128_inc(&ctrblk);

			twofish_enc_blk_xor_3way(ctx, (u8 *)dst,
						 (u8 *)ctrblocks);

			src += 3;
			dst += 3;
			nbytes -= bsize * 3;
		} while (nbytes >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	do {
		if (dst != src)
			*dst = *src;

		u128_to_be128(&ctrblocks[0], &ctrblk);
		u128_inc(&ctrblk);

		twofish_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);
		u128_xor(dst, dst, (u128 *)ctrblocks);

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

done:
	u128_to_be128((be128 *)walk->iv, &ctrblk);
	return nbytes;
}

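/*
 * Walk the request in multiples of the block size; a trailing partial
 * block (CTR needs no padding) is handled by ctr_crypt_final().
 */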
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, TF_BLOCK_SIZE);

	while ((nbytes = walk.nbytes) >= TF_BLOCK_SIZE) {
		nbytes = __ctr_crypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		ctr_crypt_final(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

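/*
 * CTR turns the block cipher into a stream cipher, hence cra_blocksize is
 * 1; the IV carries the initial counter value.
 */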
static struct crypto_alg blk_ctr_alg = {
	.cra_name		= "ctr(twofish)",
	.cra_driver_name	= "ctr-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct twofish_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ctr_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE,
			.max_keysize	= TF_MAX_KEY_SIZE,
			.ivsize		= TF_BLOCK_SIZE,
			.setkey		= twofish_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
};

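/*
 * The algorithms above are reached through the generic crypto API rather
 * than by calling this file directly.  A rough sketch of a hypothetical
 * kernel-side caller (names and buffers are placeholders, error handling
 * trimmed) might look like:
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc d;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(twofish)", 0, 0);
 *	crypto_blkcipher_setkey(tfm, key, keylen);
 *	crypto_blkcipher_set_iv(tfm, iv, TF_BLOCK_SIZE);
 *	sg_init_one(&sg, buf, buflen);
 *	d.tfm = tfm;
 *	d.flags = 0;
 *	crypto_blkcipher_encrypt(&d, &sg, &sg, buflen);
 *	crypto_free_blkcipher(tfm);
 *
 * With cra_priority 300 these implementations win over the lower-priority
 * generic twofish code when the algorithm names above are requested.
 */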
static int __init init(void)
{
	int err;

	err = crypto_register_alg(&blk_ecb_alg);
	if (err)
		goto ecb_err;
	err = crypto_register_alg(&blk_cbc_alg);
	if (err)
		goto cbc_err;
	err = crypto_register_alg(&blk_ctr_alg);
	if (err)
		goto ctr_err;

	return 0;

ctr_err:
	crypto_unregister_alg(&blk_cbc_alg);
cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
ecb_err:
	return err;
}

static void __exit fini(void)
{
	crypto_unregister_alg(&blk_ctr_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Twofish Cipher Algorithm, 3-way parallel asm optimized");
MODULE_ALIAS("twofish");
MODULE_ALIAS("twofish-asm");