xref: /linux/arch/x86/crypto/serpent_avx_glue.c (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732)
/*
 * Glue Code for AVX assembler versions of Serpent Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/serpent.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/glue_helper.h>

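/*
 * Implemented in the accompanying AVX assembler; each call processes
 * eight 16-byte blocks and uses ymm state, so the FPU must be claimed
 * around these calls (the glue_helper wrappers below handle that).
 */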
/* 8-way parallel cipher functions */
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);

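/*
 * One-block CTR step for the glue code: encrypt the counter (converted
 * to big endian) with the plain C implementation, xor the keystream
 * into the data, and bump the little-endian counter for the next block.
 */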
void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);

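/*
 * One-block XTS helpers for the non-parallel tail; the glue helper
 * does the usual xor-encrypt-xor with the current tweak and advances
 * the tweak for the following block.
 */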
void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);

void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);


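/*
 * Dispatch tables for the generic 128-bit glue code.  Entries must be
 * ordered from widest to narrowest num_blocks: the glue code picks the
 * widest routine that still fits the remaining data and falls back to
 * the one-block C implementation for the tail.  fpu_blocks_limit is
 * the minimum number of blocks for which claiming the FPU pays off.
 */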
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				     dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}

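/*
 * Lazy FPU handling for the LRW callbacks: glue_fpu_begin() only
 * claims the FPU once at least SERPENT_PARALLEL_BLOCKS blocks are
 * pending, and a claim then persists until serpent_fpu_end().
 */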
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

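/*
 * Per-request state passed through lrw_crypt() to the callbacks below;
 * fpu_enabled tracks whether the FPU is currently claimed, so that it
 * is enabled at most once per request.
 */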
struct crypt_priv {
	struct serpent_ctx *ctx;
	bool fpu_enabled;
};

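/*
 * Callbacks invoked by lrw_crypt() on already-tweaked data, in chunks
 * of at most tbuflen bytes.  A chunk of exactly SERPENT_PARALLEL_BLOCKS
 * blocks goes through the 8-way AVX routine; shorter chunks fall back
 * to the one-block C implementation.
 */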
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_decrypt(ctx->ctx, srcdst, srcdst);
}

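/*
 * An LRW key is a normal Serpent key with one extra cipher block
 * appended; the trailing block keys the GF(2^128) tweak multiplier.
 */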
int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
							SERPENT_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen -
						SERPENT_BLOCK_SIZE);
}
EXPORT_SYMBOL_GPL(lrw_serpent_setkey);

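/*
 * Both LRW paths clear CRYPTO_TFM_REQ_MAY_SLEEP before calling
 * lrw_crypt(): the FPU may stay claimed across the whole walk, and
 * sleeping with the FPU claimed is not allowed.
 */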
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

void lrw_serpent_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
EXPORT_SYMBOL_GPL(lrw_serpent_exit_tfm);

int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* The key consists of two keys of equal size concatenated,
	 * therefore the total length must be even.
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
EXPORT_SYMBOL_GPL(xts_serpent_setkey);

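/*
 * glue_xts_crypt_128bit() first encrypts the IV with the tweak-key
 * context (using plain __serpent_encrypt) to derive the initial tweak,
 * then walks the data with the dispatch table and the crypt-key
 * context.
 */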
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

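/*
 * Registrations come in pairs.  The first five entries are the
 * synchronous blkcipher implementations; they are marked
 * CRYPTO_ALG_INTERNAL with priority 0, so they are only reachable
 * through the async wrappers.  The last five are the user-visible
 * ablkcipher wrappers built on ablk_helper/cryptd: they run the
 * internal algorithm directly when the FPU is usable and defer to a
 * cryptd worker otherwise.  Details worth noting: the CTR entries use
 * a blocksize of 1 (stream mode) and the same handler for encrypt and
 * decrypt, and cbc(serpent) encrypts via __ablk_encrypt since CBC
 * encryption runs block-by-block in the C cipher and never needs the
 * FPU.
 *
 * Illustrative use from other kernel code (sketch only, error
 * handling elided):
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("ctr(serpent)", 0, 0);
 *	crypto_ablkcipher_setkey(tfm, key, keylen);
 *	... set up an ablkcipher_request and call
 *	    crypto_ablkcipher_encrypt() ...
 *	crypto_free_ablkcipher(tfm);
 */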
static struct crypto_alg serpent_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx",
	.cra_driver_name	= "__driver-ecb-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx",
	.cra_driver_name	= "__driver-cbc-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx",
	.cra_driver_name	= "__driver-ctr-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx",
	.cra_driver_name	= "__driver-lrw-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx",
	.cra_driver_name	= "__driver-xts-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

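/*
 * Registration gate: the AVX code requires the kernel to save and
 * restore SSE and YMM state, so probe the corresponding XSAVE features
 * rather than just the AVX CPUID bit.
 */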
static int __init serpent_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XSTATE_SSE | XSTATE_YMM, &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

static void __exit serpent_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("serpent");