/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX2/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

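/*
 * 32-way (32 x 16-byte blocks per call) assembler routines.  Exported
 * so that other ARIA glue code (e.g. the AVX512 implementation) can
 * fall back to them.
 */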
asmlinkage void aria_aesni_avx2_encrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_encrypt_32way);
asmlinkage void aria_aesni_avx2_decrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_decrypt_32way);
asmlinkage void aria_aesni_avx2_ctr_crypt_32way(const void *ctx, u8 *dst,
						const u8 *src,
						u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_ctr_crypt_32way);
asmlinkage void aria_aesni_avx2_gfni_encrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_encrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_decrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_decrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_ctr_crypt_32way(const void *ctx, u8 *dst,
						     const u8 *src,
						     u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_ctr_crypt_32way);

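/* Implementation selected at module init: GFNI variants when available. */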
static struct aria_avx_ops aria_ops;

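/*
 * Per-request keystream buffer.  It lives in the request context rather
 * than on the stack because the 32-way path writes up to
 * ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE (512) bytes of keystream.
 */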
struct aria_avx2_request_ctx {
	u8 keystream[ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE];
};

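/*
 * ECB helpers walk the request and peel off 32-block, then 16-block,
 * then single-block chunks.  They also take care of
 * kernel_fpu_begin()/kernel_fpu_end(), claiming the FPU only when at
 * least ARIA_AESNI_PARALLEL_BLOCKS blocks are pending.
 */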
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_encrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_decrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx2_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx2_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx2_set_key(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

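/*
 * CTR mode: generate the keystream into req_ctx->keystream and XOR it
 * into the data, preferring the widest available path (32-way SIMD,
 * then 16-way SIMD, then the generic aria_encrypt() per block).
 */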
static int aria_avx2_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx2_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

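		/* Bulk path: 32 blocks (512 bytes) per FPU section. */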
		while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_32way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
		}

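		/* Medium path: 16 blocks (256 bytes) per FPU section. */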
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

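		/* Remaining full blocks, one at a time without the FPU. */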
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

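		/* Trailing partial block; only valid at the end of the request. */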
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

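/* Reserve per-request space for the keystream buffer. */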
static int aria_avx2_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx2_request_ctx));

	return 0;
}

static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "ecb(aria)",
		.base.cra_driver_name	= "ecb-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ecb_encrypt,
		.decrypt		= aria_avx2_ecb_decrypt,
	}, {
		.base.cra_name		= "ctr(aria)",
		.base.cra_driver_name	= "ctr-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ctr_encrypt,
		.decrypt		= aria_avx2_ctr_encrypt,
		.init			= aria_avx2_init_tfm,
	}
};

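/*
 * Require AVX, AVX2, AES-NI and OSXSAVE, plus kernel support for the
 * SSE/YMM xstates, before registering the algorithms.
 */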
static int __init aria_avx2_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AVX2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

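	/* Prefer the GFNI-based implementations when the CPU has GFNI. */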
	if (boot_cpu_has(X86_FEATURE_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_gfni_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_gfni_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_gfni_ctr_crypt_32way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_ctr_crypt_32way;
	}

	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

static void __exit aria_avx2_exit(void)
{
	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

module_init(aria_avx2_init);
module_exit(aria_avx2_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX2/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx2");