/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

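/*
 * The assembler routines process 16 blocks (256 bytes) per call.  They
 * are exported so that the wider AVX2/AVX-512 glue modules can reuse
 * them as their 16-way fallback path.
 */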
asmlinkage void aria_aesni_avx_encrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_encrypt_16way);
asmlinkage void aria_aesni_avx_decrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_decrypt_16way);
asmlinkage void aria_aesni_avx_ctr_crypt_16way(const void *ctx, u8 *dst,
					       const u8 *src,
					       u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_ctr_crypt_16way);
#ifdef CONFIG_AS_GFNI
asmlinkage void aria_aesni_avx_gfni_encrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_encrypt_16way);
asmlinkage void aria_aesni_avx_gfni_decrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_decrypt_16way);
asmlinkage void aria_aesni_avx_gfni_ctr_crypt_16way(const void *ctx, u8 *dst,
						    const u8 *src,
						    u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_ctr_crypt_16way);
#endif /* CONFIG_AS_GFNI */

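/*
 * Entry points selected at module init: the GFNI variants when both the
 * CPU and the assembler support GFNI, the plain AES-NI/AVX ones otherwise.
 */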
static struct aria_avx_ops aria_ops;

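/*
 * Per-request scratch space for CTR mode.  Placing the 256-byte keystream
 * buffer in the request context keeps it off the kernel stack.
 */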
struct aria_avx_request_ctx {
	u8 keystream[ARIA_AESNI_PARALLEL_BLOCK_SIZE];
};

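/*
 * The ECB macros from ecb_cbc_helpers.h walk the request and manage the
 * kernel_fpu_begin()/kernel_fpu_end() sections: 16-block parallel chunks
 * go to the assembler routine, the single-block tail to the generic C
 * cipher.
 */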
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

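/* The key schedule itself comes from the generic ARIA implementation. */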
static int aria_avx_set_key(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

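/*
 * CTR mode: encrypt the counter to produce a keystream and XOR it into
 * the data.  16-block chunks use the FPU-accelerated assembler routine;
 * leftover full blocks and a partial tail are handled with the generic
 * aria_encrypt() on a copy of the IV.
 */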
static int aria_avx_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

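		/*
		 * Bulk path: 16 blocks per call.  The assembler routine
		 * generates the keystream itself and advances walk.iv.
		 */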
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

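		/* Leftover full blocks: one block of keystream at a time. */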
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

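		/* Partial block, only possible on the final walk step. */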
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

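/* Reserve per-request memory for the CTR keystream scratch buffer. */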
static int aria_avx_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx_request_ctx));

	return 0;
}

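/*
 * cra_priority 400 ranks these drivers above the generic C "aria"
 * implementation.  For CTR, a walksize of 16 blocks lets the walker hand
 * the routine whole parallel chunks whenever possible.
 */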
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "ecb(aria)",
		.base.cra_driver_name	= "ecb-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ecb_encrypt,
		.decrypt		= aria_avx_ecb_decrypt,
	}, {
		.base.cra_name		= "ctr(aria)",
		.base.cra_driver_name	= "ctr-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.walksize		= 16 * ARIA_BLOCK_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ctr_encrypt,
		.decrypt		= aria_avx_ctr_encrypt,
		.init			= aria_avx_init_tfm,
	}
};

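/*
 * Runtime detection: require AVX, AES-NI and OSXSAVE, verify that the OS
 * saves SSE/YMM state, then wire up either the GFNI or the plain AES-NI
 * entry points before registering the algorithms.
 */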
static int __init aria_avx_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	if (boot_cpu_has(X86_FEATURE_GFNI) && IS_ENABLED(CONFIG_AS_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
	}

	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

static void __exit aria_avx_exit(void)
{
	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

module_init(aria_avx_init);
module_exit(aria_avx_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx");