/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

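/*
 * 16-way parallel entry points, implemented in aria-aesni-avx-asm_64.S.
 * They are exported (GPL) so that the wider AVX2 ARIA glue can fall back
 * to them for chunks that do not fill a 32-block pass.
 */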
asmlinkage void aria_aesni_avx_encrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_encrypt_16way);
asmlinkage void aria_aesni_avx_decrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_decrypt_16way);
asmlinkage void aria_aesni_avx_ctr_crypt_16way(const void *ctx, u8 *dst,
					       const u8 *src,
					       u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_ctr_crypt_16way);
#ifdef CONFIG_AS_GFNI
asmlinkage void aria_aesni_avx_gfni_encrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_encrypt_16way);
asmlinkage void aria_aesni_avx_gfni_decrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_decrypt_16way);
asmlinkage void aria_aesni_avx_gfni_ctr_crypt_16way(const void *ctx, u8 *dst,
						    const u8 *src,
						    u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_ctr_crypt_16way);
#endif /* CONFIG_AS_GFNI */

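/*
 * Dispatch table filled in at module init: the GFNI variants when the CPU
 * and assembler support them, the plain AES-NI/AVX variants otherwise.
 */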
static struct aria_avx_ops aria_ops;

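/* Per-request scratch buffer holding one 16-block batch of CTR keystream. */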
struct aria_avx_request_ctx {
	u8 keystream[ARIA_AESNI_PARALLEL_BLOCK_SIZE];
};

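/*
 * ECB is driven by the shared ecb_cbc_helpers.h macros: 16 blocks at a
 * time through the vector code (inside a kernel FPU section), then one
 * block at a time through the generic C cipher for any tail.
 */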
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx_set_key(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

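/*
 * CTR runs in three stages per walk step: 16-block batches through the
 * assembler, remaining whole blocks through the generic cipher, and a
 * final partial block that consumes only the bytes left in the request.
 */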
static int aria_avx_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

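		/* Full 16-block batches; one FPU section per call. */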
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

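		/* Remaining whole blocks: keystream from the generic cipher. */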
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

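		/*
		 * A partial tail block can only occur on the final walk
		 * step; XOR in just the bytes that remain.
		 */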
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

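/* Reserve per-request space for the CTR keystream scratch buffer. */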
static int aria_avx_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx_request_ctx));

	return 0;
}

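/*
 * Priority 400 makes these preferred over the generic C "aria"
 * implementation.  CTR is a stream mode, hence cra_blocksize = 1, and
 * walksize asks the walk layer for 16 blocks at a time to feed the
 * parallel code.  Callers reach these through the usual skcipher API,
 * e.g. crypto_alloc_skcipher("ctr(aria)", 0, 0).
 */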
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "ecb(aria)",
		.base.cra_driver_name	= "ecb-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ecb_encrypt,
		.decrypt		= aria_avx_ecb_decrypt,
	}, {
		.base.cra_name		= "ctr(aria)",
		.base.cra_driver_name	= "ctr-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.walksize		= 16 * ARIA_BLOCK_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ctr_encrypt,
		.decrypt		= aria_avx_ctr_encrypt,
		.init			= aria_avx_init_tfm,
	}
};

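/*
 * Require AVX, AES-NI and OSXSAVE, verify that the kernel manages the
 * SSE and YMM xstates, then select the GFNI entry points when both the
 * CPU and the assembler support them.
 */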
static int __init aria_avx_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	if (boot_cpu_has(X86_FEATURE_GFNI) && IS_ENABLED(CONFIG_AS_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
	}

	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

static void __exit aria_avx_exit(void)
{
	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

module_init(aria_avx_init);
module_exit(aria_avx_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx");