/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX2/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

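/*
 * Assembler entry points. Each *_32way call processes 32 ARIA blocks
 * (512 bytes) in parallel; the GFNI variants are built only when the
 * assembler supports those instructions (CONFIG_AS_GFNI).
 */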
asmlinkage void aria_aesni_avx2_encrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_encrypt_32way);
asmlinkage void aria_aesni_avx2_decrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_decrypt_32way);
asmlinkage void aria_aesni_avx2_ctr_crypt_32way(const void *ctx, u8 *dst,
						const u8 *src,
						u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_ctr_crypt_32way);
#ifdef CONFIG_AS_GFNI
asmlinkage void aria_aesni_avx2_gfni_encrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_encrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_decrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_decrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_ctr_crypt_32way(const void *ctx, u8 *dst,
						     const u8 *src,
						     u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_ctr_crypt_32way);
#endif /* CONFIG_AS_GFNI */

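/* Dispatch table, filled at module init with the best available variant. */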
static struct aria_avx_ops aria_ops;

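/*
 * Per-request scratch space for CTR mode: room for the keystream of one
 * full 32-block chunk.
 */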
struct aria_avx2_request_ctx {
	u8 keystream[ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE];
};

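/*
 * ECB walks fall through from the widest implementation to the narrowest:
 * 32 blocks via AVX2, then 16 blocks via AVX, then one block at a time
 * via the generic C code.
 */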
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_encrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_decrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx2_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx2_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx2_set_key(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

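/*
 * CTR mode: generate the keystream into the per-request buffer and XOR it
 * into the data. Bulk data takes the 32-way and 16-way assembler paths;
 * the remainder is handled one block at a time in C.
 */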
static int aria_avx2_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx2_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

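		/* Bulk path: 32 blocks (512 bytes) per FPU section. */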
		while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_32way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
		}

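		/* 16 blocks (256 bytes) per FPU section. */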
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

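		/* Remaining full blocks: encrypt the counter in C and XOR. */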
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

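		/* Partial final block, only possible on the last walk step. */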
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

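/* Reserve room in each request for the CTR keystream buffer. */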
static int aria_avx2_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx2_request_ctx));

	return 0;
}

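/*
 * CTR is registered as a stream cipher (cra_blocksize of 1). Its request
 * context is large (it carries the 512-byte keystream buffer above), hence
 * CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE.
 */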
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "ecb(aria)",
		.base.cra_driver_name	= "ecb-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ecb_encrypt,
		.decrypt		= aria_avx2_ecb_decrypt,
	}, {
		.base.cra_name		= "ctr(aria)",
		.base.cra_driver_name	= "ctr-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ctr_encrypt,
		.decrypt		= aria_avx2_ctr_encrypt,
		.init			= aria_avx2_init_tfm,
	}
};

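/*
 * Require AVX, AVX2, AES-NI and OSXSAVE, verify that the kernel saves
 * SSE/YMM state, then select the GFNI variants when both the CPU and the
 * assembler support them.
 */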
static int __init aria_avx2_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AVX2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	if (boot_cpu_has(X86_FEATURE_GFNI) && IS_ENABLED(CONFIG_AS_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_gfni_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_gfni_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_gfni_ctr_crypt_32way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_ctr_crypt_32way;
	}

	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

static void __exit aria_avx2_exit(void)
{
	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

module_init(aria_avx2_init);
module_exit(aria_avx2_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX2/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx2");