// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for the AVX2/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

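/*
 * Assembler entry points. Each 32-way routine handles 32 ARIA blocks
 * (512 bytes) per call; the GFNI variants exist only when the assembler
 * can emit GFNI instructions (CONFIG_AS_GFNI).
 */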
asmlinkage void aria_aesni_avx2_encrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_encrypt_32way);
asmlinkage void aria_aesni_avx2_decrypt_32way(const void *ctx, u8 *dst,
					      const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_decrypt_32way);
asmlinkage void aria_aesni_avx2_ctr_crypt_32way(const void *ctx, u8 *dst,
						const u8 *src,
						u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_ctr_crypt_32way);
#ifdef CONFIG_AS_GFNI
asmlinkage void aria_aesni_avx2_gfni_encrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_encrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_decrypt_32way(const void *ctx, u8 *dst,
						   const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_decrypt_32way);
asmlinkage void aria_aesni_avx2_gfni_ctr_crypt_32way(const void *ctx, u8 *dst,
						     const u8 *src,
						     u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx2_gfni_ctr_crypt_32way);
#endif /* CONFIG_AS_GFNI */

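/*
 * Dispatch table pointing at either the plain AES-NI or the GFNI
 * assembler routines, chosen once at module init from CPU features.
 */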
static struct aria_avx_ops aria_ops;

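/*
 * Per-request CTR scratch space, sized to hold the keystream for one
 * 32-way chunk (32 * ARIA_BLOCK_SIZE bytes).
 */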
struct aria_avx2_request_ctx {
	u8 keystream[ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE];
};

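/*
 * ECB: the ecb_cbc_helpers.h macros walk the request and dispatch each
 * step to the widest routine that fits the remaining data: 32 blocks per
 * call, then 16, then the generic single-block cipher for the tail.
 */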
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_encrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_AVX2_PARALLEL_BLOCKS, aria_ops.aria_decrypt_32way);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx2_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx2_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx2_set_key(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

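/*
 * CTR: consume the data in 32-way chunks, then 16-way chunks, then one
 * block at a time via the generic cipher; a trailing partial block XORs
 * in only the first nbytes of a final keystream block.
 */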
static int aria_avx2_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx2_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

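		/* Bulk path: 32 blocks (512 bytes) per FPU section. */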
		while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_32way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;
		}

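		/* Medium path: 16 blocks (256 bytes) per FPU section. */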
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

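		/* Remaining whole blocks: generic cipher, one counter value per block. */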
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

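		/*
		 * A leftover of less than one block can only occur on the
		 * final walk step: encrypt one more counter block and XOR
		 * in just the nbytes that remain.
		 */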
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

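/*
 * Reserve per-request space for the keystream buffer; the CTR algorithm
 * below flags this with CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE.
 */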
static int aria_avx2_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx2_request_ctx));

	return 0;
}

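/* CTR is a stream mode: cra_blocksize is 1 and decrypt reuses encrypt. */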
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "ecb(aria)",
		.base.cra_driver_name	= "ecb-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ecb_encrypt,
		.decrypt		= aria_avx2_ecb_decrypt,
	}, {
		.base.cra_name		= "ctr(aria)",
		.base.cra_driver_name	= "ctr-aria-avx2",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.setkey			= aria_avx2_set_key,
		.encrypt		= aria_avx2_ctr_encrypt,
		.decrypt		= aria_avx2_ctr_encrypt,
		.init			= aria_avx2_init_tfm,
	}
};

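/*
 * The 32-way code needs AVX2 and AES-NI, and the kernel must be saving
 * the SSE and YMM register state (OSXSAVE plus the matching xfeatures).
 */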
static int __init aria_avx2_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AVX2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

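	/* Prefer the GFNI routines when both CPU and assembler support them. */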
	if (boot_cpu_has(X86_FEATURE_GFNI) && IS_ENABLED(CONFIG_AS_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_gfni_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_gfni_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_gfni_ctr_crypt_32way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
		aria_ops.aria_encrypt_32way = aria_aesni_avx2_encrypt_32way;
		aria_ops.aria_decrypt_32way = aria_aesni_avx2_decrypt_32way;
		aria_ops.aria_ctr_crypt_32way = aria_aesni_avx2_ctr_crypt_32way;
	}

	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

static void __exit aria_avx2_exit(void)
{
	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

module_init(aria_avx2_init);
module_exit(aria_avx2_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX2/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx2");