/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/module.h>
#include <linux/types.h>

#include <asm/fpu/api.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

asmlinkage void aria_aesni_avx_encrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_encrypt_16way);
asmlinkage void aria_aesni_avx_decrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_decrypt_16way);
asmlinkage void aria_aesni_avx_ctr_crypt_16way(const void *ctx, u8 *dst,
					       const u8 *src,
					       u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_ctr_crypt_16way);
asmlinkage void aria_aesni_avx_gfni_encrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_encrypt_16way);
asmlinkage void aria_aesni_avx_gfni_decrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_decrypt_16way);
asmlinkage void aria_aesni_avx_gfni_ctr_crypt_16way(const void *ctx, u8 *dst,
						    const u8 *src,
						    u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_ctr_crypt_16way);

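/*
 * Function pointers filled in at module init: the GFNI variants when the
 * CPU supports them, otherwise the plain AES-NI/AVX implementations.
 */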
static struct aria_avx_ops aria_ops;

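/* Per-request scratch space for CTR mode keystream blocks. */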
struct aria_avx_request_ctx {
	u8 keystream[ARIA_AESNI_PARALLEL_BLOCK_SIZE];
};

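/*
 * ECB: the ECB_* helper macros (ecb_cbc_helpers.h) walk the request and
 * wrap the 16-way assembler routine in a kernel FPU section, falling back
 * to the generic single-block routine for any remainder.
 */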
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx_set_key(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

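/*
 * CTR: full 16-block chunks go to the assembler routine, which generates
 * the keystream and XORs it in one pass; leftover blocks and the final
 * partial block use the generic aria_encrypt() on the counter.
 */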
static int aria_avx_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

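		/* Bulk path: 16 blocks of keystream per FPU section. */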
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

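		/* Remaining full blocks: encrypt the counter in software. */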
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

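		/*
		 * Final partial block, only possible at the end of the
		 * request: XOR just the remaining bytes of keystream.
		 */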
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

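/* Reserve per-request space for the CTR keystream buffer. */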
static int aria_avx_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx_request_ctx));

	return 0;
}

static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "ecb(aria)",
		.base.cra_driver_name	= "ecb-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ecb_encrypt,
		.decrypt		= aria_avx_ecb_decrypt,
	}, {
		.base.cra_name		= "ctr(aria)",
		.base.cra_driver_name	= "ctr-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.walksize		= 16 * ARIA_BLOCK_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ctr_encrypt,
		.decrypt		= aria_avx_ctr_encrypt,
		.init			= aria_avx_init_tfm,
	}
};

static int __init aria_avx_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

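	/* Prefer the GFNI-based S-box implementation when available. */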
	if (boot_cpu_has(X86_FEATURE_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
	}

	return crypto_register_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

static void __exit aria_avx_exit(void)
{
	crypto_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs));
}

module_init(aria_avx_init);
module_exit(aria_avx_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx");