/*
 * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>

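/*
 * Number of 16-byte Camellia blocks processed per assembler call: 16 by
 * the AVX/AES-NI 16-way routines, 32 by the AVX2/AES-NI 32-way routines.
 */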
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32

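/*
 * The 32-way routines declared below live in
 * arch/x86/crypto/camellia-aesni-avx2-asm_64.S; the 16-way, 2-way and
 * single-block helpers used further down come from the AVX and plain
 * x86_64 Camellia modules.
 */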
/* 32-way AVX2/AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);

asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);

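/*
 * Dispatch tables for the common glue code. Entries are ordered from the
 * widest to the narrowest implementation; the glue helper uses the first
 * entry whose num_blocks still fits the bytes remaining in the walk, so
 * large requests run 32-way under AVX2 and the tail falls through to the
 * 16-way, 2-way and single-block code.
 */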
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};

static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};

static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};

static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};

static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};

static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
}

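/*
 * Only take the FPU for requests of at least fpu_blocks_limit (16)
 * blocks; for anything smaller, saving and restoring the extended
 * register state costs more than the SIMD speedup, and the scalar
 * 1/2-way code is used instead.
 */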
static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
			      CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
			      nbytes);
}

static inline void camellia_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
				 &tfm->crt_flags);
}

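/*
 * Context handed to the LRW callbacks below: the Camellia key schedule
 * plus a flag recording whether this request already owns the FPU, so
 * repeated callback invocations do not nest kernel_fpu_begin() calls.
 */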
struct crypt_priv {
	struct camellia_ctx *ctx;
	bool fpu_enabled;
};

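/*
 * ECB-encrypt a contiguous buffer in place for lrw_crypt(), cascading
 * from the 32-way AVX2 path down to the 16-way, 2-way and single-block
 * code as the remaining length shrinks. decrypt_callback() mirrors this.
 */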
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_32way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
	}

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_enc_blk(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_32way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
	}

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_dec_blk(ctx->ctx, srcdst, srcdst);
}

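/*
 * LRW drives the callbacks above through lrw_crypt(). The on-stack tweak
 * buffer covers one full 32-block batch, and CRYPTO_TFM_REQ_MAY_SLEEP is
 * cleared because the walk must not sleep while kernel_fpu_begin() may be
 * in effect across callback invocations.
 */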
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

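/*
 * XTS derives the tweak by encrypting the IV regardless of direction,
 * which is why xts_decrypt() below also passes camellia_enc_blk as the
 * tweak function.
 */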
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

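/*
 * Algorithm registrations. The "__" entries are synchronous internal
 * implementations; CRYPTO_ALG_INTERNAL keeps them from being allocated
 * directly by users. The public async entries wrap them via ablk_helper,
 * which calls straight through when the FPU is usable and defers to a
 * cryptd thread otherwise.
 */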
static struct crypto_alg cmll_algs[10] = { {
	.cra_name		= "__ecb-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-ecb-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-cbc-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-ctr-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-lrw-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_camellia_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= lrw_camellia_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-xts-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= xts_camellia_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(camellia)",
	.cra_driver_name	= "ecb-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(camellia)",
	.cra_driver_name	= "cbc-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
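			/*
			 * CBC encryption is inherently serial and backed by
			 * the non-SIMD camellia_enc_blk(), so it bypasses the
			 * FPU-usability check and runs synchronously.
			 */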
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(camellia)",
	.cra_driver_name	= "ctr-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(camellia)",
	.cra_driver_name	= "lrw-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(camellia)",
	.cra_driver_name	= "xts-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

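/*
 * Typical in-kernel use, as a minimal sketch (error handling omitted;
 * key, scatterlists and request setup are assumed done elsewhere):
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("ctr(camellia)", 0, 0);
 *
 *	crypto_ablkcipher_setkey(tfm, key, CAMELLIA_MAX_KEY_SIZE);
 *	...
 *	crypto_free_ablkcipher(tfm);
 *
 * With cra_priority 500, these drivers win over the generic and plain
 * assembler Camellia implementations whenever the CPU checks in
 * camellia_aesni_init() pass.
 */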
static int __init camellia_aesni_init(void)
{
	const char *feature_name;

	if (!cpu_has_avx2 || !cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XSTATE_SSE | XSTATE_YMM, &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

static void __exit camellia_aesni_fini(void)
{
	crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");