xref: /linux/arch/x86/crypto/des3_ede_glue.c (revision bfd5bb6f90af092aa345b15cd78143956a13c2a8)
/*
 * Glue Code for assembler optimized version of 3DES
 *
 * Copyright © 2014 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 */

#include <crypto/algapi.h>
#include <crypto/des.h>
#include <crypto/internal/skcipher.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>

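/*
 * Per-tfm context: separate expanded key schedules for encryption and
 * decryption, both consumed by the same assembler crypt routines.
 */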
struct des3_ede_x86_ctx {
	u32 enc_expkey[DES3_EDE_EXPKEY_WORDS];
	u32 dec_expkey[DES3_EDE_EXPKEY_WORDS];
};

/* regular block cipher functions */
asmlinkage void des3_ede_x86_64_crypt_blk(const u32 *expkey, u8 *dst,
					  const u8 *src);

/* 3-way parallel cipher functions */
asmlinkage void des3_ede_x86_64_crypt_blk_3way(const u32 *expkey, u8 *dst,
					       const u8 *src);

static inline void des3_ede_enc_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
				    const u8 *src)
{
	u32 *enc_ctx = ctx->enc_expkey;

	des3_ede_x86_64_crypt_blk(enc_ctx, dst, src);
}

static inline void des3_ede_dec_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
				    const u8 *src)
{
	u32 *dec_ctx = ctx->dec_expkey;

	des3_ede_x86_64_crypt_blk(dec_ctx, dst, src);
}

static inline void des3_ede_enc_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	u32 *enc_ctx = ctx->enc_expkey;

	des3_ede_x86_64_crypt_blk_3way(enc_ctx, dst, src);
}

static inline void des3_ede_dec_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	u32 *dec_ctx = ctx->dec_expkey;

	des3_ede_x86_64_crypt_blk_3way(dec_ctx, dst, src);
}

static void des3_ede_x86_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	des3_ede_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void des3_ede_x86_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	des3_ede_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

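/*
 * ECB helper shared by encryption and decryption: walk the request,
 * process full 3-block batches with the 3-way assembler routine and
 * fall back to single-block calls for the remainder.
 */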
static int ecb_crypt(struct skcipher_request *req, const u32 *expkey)
{
	const unsigned int bsize = DES3_EDE_BLOCK_SIZE;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		u8 *wsrc = walk.src.virt.addr;
		u8 *wdst = walk.dst.virt.addr;

		/* Process three block batch */
		if (nbytes >= bsize * 3) {
			do {
				des3_ede_x86_64_crypt_blk_3way(expkey, wdst,
							       wsrc);

				wsrc += bsize * 3;
				wdst += bsize * 3;
				nbytes -= bsize * 3;
			} while (nbytes >= bsize * 3);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			des3_ede_x86_64_crypt_blk(expkey, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_crypt(req, ctx->enc_expkey);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_crypt(req, ctx->dec_expkey);
}

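/*
 * CBC encryption is inherently serial (each block depends on the previous
 * ciphertext block), so only the single-block routine is used here.
 */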
static unsigned int __cbc_encrypt(struct des3_ede_x86_ctx *ctx,
				  struct skcipher_walk *walk)
{
	unsigned int bsize = DES3_EDE_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 *iv = (u64 *)walk->iv;

	do {
		*dst = *src ^ *iv;
		des3_ede_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	*(u64 *)walk->iv = *iv;
	return nbytes;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_encrypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

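/*
 * CBC decryption walks the data from the last block back to the first so
 * that in-place operation does not overwrite ciphertext blocks that are
 * still needed as chaining values; full 3-block batches use the 3-way
 * assembler routine.
 */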
static unsigned int __cbc_decrypt(struct des3_ede_x86_ctx *ctx,
				  struct skcipher_walk *walk)
{
	unsigned int bsize = DES3_EDE_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ivs[3 - 1];
	u64 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			nbytes -= bsize * 3 - bsize;
			src -= 3 - 1;
			dst -= 3 - 1;

			ivs[0] = src[0];
			ivs[1] = src[1];

			des3_ede_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);

			dst[1] ^= ivs[0];
			dst[2] ^= ivs[1];

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			*dst ^= *(src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 3);
	}

	/* Handle leftovers */
	for (;;) {
		des3_ede_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		*dst ^= *(src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	*dst ^= *(u64 *)walk->iv;
	*(u64 *)walk->iv = last_iv;

	return nbytes;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_decrypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

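/*
 * Handle the final partial block in CTR mode: encrypt the counter block to
 * produce one block of keystream and XOR only the remaining bytes.
 */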
static void ctr_crypt_final(struct des3_ede_x86_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[DES3_EDE_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	des3_ede_enc_blk(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, DES3_EDE_BLOCK_SIZE);
}

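/*
 * Bulk CTR path: the counter is kept as a host-endian 64-bit value and
 * converted to big-endian for each generated keystream block, three blocks
 * at a time where possible.
 */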
static unsigned int __ctr_crypt(struct des3_ede_x86_ctx *ctx,
				struct skcipher_walk *walk)
{
	unsigned int bsize = DES3_EDE_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	__be64 *src = (__be64 *)walk->src.virt.addr;
	__be64 *dst = (__be64 *)walk->dst.virt.addr;
	u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
	__be64 ctrblocks[3];

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			/* create ctrblks for parallel encrypt */
			ctrblocks[0] = cpu_to_be64(ctrblk++);
			ctrblocks[1] = cpu_to_be64(ctrblk++);
			ctrblocks[2] = cpu_to_be64(ctrblk++);

			des3_ede_enc_blk_3way(ctx, (u8 *)ctrblocks,
					      (u8 *)ctrblocks);

			dst[0] = src[0] ^ ctrblocks[0];
			dst[1] = src[1] ^ ctrblocks[1];
			dst[2] = src[2] ^ ctrblocks[2];

			src += 3;
			dst += 3;
		} while ((nbytes -= bsize * 3) >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	do {
		ctrblocks[0] = cpu_to_be64(ctrblk++);

		des3_ede_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);

		dst[0] = src[0] ^ ctrblocks[0];

		src += 1;
		dst += 1;
	} while ((nbytes -= bsize) >= bsize);

done:
	*(__be64 *)walk->iv = cpu_to_be64(ctrblk);
	return nbytes;
}

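/*
 * Top-level CTR handler: full blocks go through __ctr_crypt(); a trailing
 * partial block, if any, is finished by ctr_crypt_final().
 */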
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) >= DES3_EDE_BLOCK_SIZE) {
		nbytes = __ctr_crypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	if (nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

static int des3_ede_x86_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct des3_ede_x86_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 i, j, tmp;
	int err;

	/* Generate encryption context using generic implementation. */
	err = __des3_ede_setkey(ctx->enc_expkey, &tfm->crt_flags, key, keylen);
	if (err < 0)
		return err;

	/*
	 * Fix the encryption context for this implementation and derive the
	 * decryption context from it: the second word of each round-key pair
	 * is rotated right by four bits, and the decryption schedule is the
	 * encryption schedule with the round-key pairs in reverse order.
	 */
	j = DES3_EDE_EXPKEY_WORDS - 2;
	for (i = 0; i < DES3_EDE_EXPKEY_WORDS; i += 2, j -= 2) {
		tmp = ror32(ctx->enc_expkey[i + 1], 4);
		ctx->enc_expkey[i + 1] = tmp;

		ctx->dec_expkey[j + 0] = ctx->enc_expkey[i + 0];
		ctx->dec_expkey[j + 1] = tmp;
	}

	return 0;
}

static int des3_ede_x86_setkey_skcipher(struct crypto_skcipher *tfm,
					const u8 *key,
					unsigned int keylen)
{
	return des3_ede_x86_setkey(&tfm->base, key, keylen);
}

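/* Plain single-block cipher, registered under the generic "des3_ede" name. */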
static struct crypto_alg des3_ede_cipher = {
	.cra_name		= "des3_ede",
	.cra_driver_name	= "des3_ede-asm",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= DES3_EDE_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct des3_ede_x86_ctx),
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= DES3_EDE_KEY_SIZE,
			.cia_max_keysize	= DES3_EDE_KEY_SIZE,
			.cia_setkey		= des3_ede_x86_setkey,
			.cia_encrypt		= des3_ede_x86_encrypt,
			.cia_decrypt		= des3_ede_x86_decrypt,
		}
	}
};

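/* ECB, CBC and CTR skcipher modes built directly on the assembler routines. */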
static struct skcipher_alg des3_ede_skciphers[] = {
	{
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_driver_name	= "ecb-des3_ede-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct des3_ede_x86_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.setkey			= des3_ede_x86_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_driver_name	= "cbc-des3_ede-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct des3_ede_x86_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES3_EDE_BLOCK_SIZE,
		.setkey			= des3_ede_x86_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "ctr(des3_ede)",
		.base.cra_driver_name	= "ctr-des3_ede-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct des3_ede_x86_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES3_EDE_BLOCK_SIZE,
		.chunksize		= DES3_EDE_BLOCK_SIZE,
		.setkey			= des3_ede_x86_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}
};

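/*
 * CPUs on which the assembler implementation is expected to be slower than
 * the generic C code and is therefore disabled by default.
 */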
static bool is_blacklisted_cpu(void)
{
	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
		return false;

	if (boot_cpu_data.x86 == 0x0f) {
		/*
		 * On Pentium 4, des3_ede-x86_64 is slower than the generic C
		 * implementation because of its use of 64-bit rotates, which
		 * are very slow on P4. Therefore blacklist P4s.
		 */
		return true;
	}

	return false;
}

static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");

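/*
 * Register the plain cipher first, then the ECB/CBC/CTR skciphers; on
 * skcipher registration failure the cipher registration is rolled back.
 */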
static int __init des3_ede_x86_init(void)
{
	int err;

	if (!force && is_blacklisted_cpu()) {
		pr_info("des3_ede-x86_64: performance on this CPU would be suboptimal: disabling des3_ede-x86_64.\n");
		return -ENODEV;
	}

	err = crypto_register_alg(&des3_ede_cipher);
	if (err)
		return err;

	err = crypto_register_skciphers(des3_ede_skciphers,
					ARRAY_SIZE(des3_ede_skciphers));
	if (err)
		crypto_unregister_alg(&des3_ede_cipher);

	return err;
}

static void __exit des3_ede_x86_fini(void)
{
	crypto_unregister_alg(&des3_ede_cipher);
	crypto_unregister_skciphers(des3_ede_skciphers,
				    ARRAY_SIZE(des3_ede_skciphers));
}

module_init(des3_ede_x86_init);
module_exit(des3_ede_x86_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Triple DES EDE Cipher Algorithm, asm optimized");
MODULE_ALIAS_CRYPTO("des3_ede");
MODULE_ALIAS_CRYPTO("des3_ede-asm");
MODULE_AUTHOR("Jussi Kivilinna <jussi.kivilinna@iki.fi>");