/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4-GCM AEAD Algorithm using ARMv8 Crypto Extensions,
 * as specified in RFC 8998:
 * https://datatracker.ietf.org/doc/html/rfc8998
 *
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/simd.h>
#include <crypto/b128ops.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-ce.h"

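/*
 * Primitives implemented in assembly (presumably the accompanying
 * sm4-ce-gcm-core.S). The ghash_table layout and IV handling are owned
 * by the asm side; the C glue treats the table as opaque.
 */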
asmlinkage void sm4_ce_pmull_ghash_setup(const u32 *rkey_enc, u8 *ghash_table);
asmlinkage void pmull_ghash_update(const u8 *ghash_table, u8 *ghash,
                                   const u8 *src, unsigned int nblocks);
asmlinkage void sm4_ce_pmull_gcm_enc(const u32 *rkey_enc, u8 *dst,
                                     const u8 *src, u8 *iv,
                                     unsigned int nbytes, u8 *ghash,
                                     const u8 *ghash_table, const u8 *lengths);
asmlinkage void sm4_ce_pmull_gcm_dec(const u32 *rkey_enc, u8 *dst,
                                     const u8 *src, u8 *iv,
                                     unsigned int nbytes, u8 *ghash,
                                     const u8 *ghash_table, const u8 *lengths);

#define GHASH_BLOCK_SIZE        16
#define GCM_IV_SIZE             12
struct sm4_gcm_ctx {
        struct sm4_ctx key;
        u8 ghash_table[16 * 4];
};

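/*
 * Expand the SM4 key and derive the GHASH key material (in standard
 * GCM the hash key is H = E_K(0^128); here that is computed by the
 * assembly setup routine) inside a SIMD-enabled section. SM4-GCM only
 * takes a 128-bit key.
 */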
static int gcm_setkey(struct crypto_aead *tfm, const u8 *key,
                      unsigned int key_len)
{
        struct sm4_gcm_ctx *ctx = crypto_aead_ctx(tfm);

        if (key_len != SM4_KEY_SIZE)
                return -EINVAL;

        scoped_ksimd() {
                sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
                                  crypto_sm4_fk, crypto_sm4_ck);
                sm4_ce_pmull_ghash_setup(ctx->key.rkey_enc, ctx->ghash_table);
        }
        return 0;
}

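/* Accept the tag lengths GCM permits: 32, 64, or 96..128 bits. */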
static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 8:
        case 12 ... 16:
                return 0;
        default:
                return -EINVAL;
        }
}

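/*
 * Fold the associated data into the GHASH state. The scatterlist is
 * walked chunk by chunk; partial blocks are staged in a bounce buffer,
 * and the final partial block is zero-padded as GHASH requires.
 */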
static void gcm_calculate_auth_mac(struct aead_request *req, u8 ghash[])
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
        u8 __aligned(8) buffer[GHASH_BLOCK_SIZE];
        u32 assoclen = req->assoclen;
        struct scatter_walk walk;
        unsigned int buflen = 0;

        scatterwalk_start(&walk, req->src);

        do {
                unsigned int n, orig_n;
                const u8 *p;

                orig_n = scatterwalk_next(&walk, assoclen);
                p = walk.addr;
                n = orig_n;

                if (n + buflen < GHASH_BLOCK_SIZE) {
                        memcpy(&buffer[buflen], p, n);
                        buflen += n;
                } else {
                        unsigned int nblocks;

                        if (buflen) {
                                unsigned int l = GHASH_BLOCK_SIZE - buflen;

                                memcpy(&buffer[buflen], p, l);
                                p += l;
                                n -= l;

                                pmull_ghash_update(ctx->ghash_table, ghash,
                                                   buffer, 1);
                        }

                        nblocks = n / GHASH_BLOCK_SIZE;
                        if (nblocks) {
                                pmull_ghash_update(ctx->ghash_table, ghash,
                                                   p, nblocks);
                                p += nblocks * GHASH_BLOCK_SIZE;
                        }

                        buflen = n % GHASH_BLOCK_SIZE;
                        if (buflen)
                                memcpy(&buffer[0], p, buflen);
                }

                scatterwalk_done_src(&walk, orig_n);
                assoclen -= orig_n;
        } while (assoclen);

        /* zero-pad the final partial block */
        if (buflen) {
                memset(&buffer[buflen], 0, GHASH_BLOCK_SIZE - buflen);
                pmull_ghash_update(ctx->ghash_table, ghash, buffer, 1);
        }
}

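/*
 * Shared encrypt/decrypt path. The counter starts at 2 because, with a
 * 96-bit IV, counter value 1 is reserved for the tag mask E(K, IV||1)
 * in GCM. On the final chunk, the bit lengths of the AAD and the text
 * are handed to the assembly so it can finalize GHASH and produce the
 * tag in one pass.
 */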
static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
                     u8 ghash[], int err,
                     void (*sm4_ce_pmull_gcm_crypt)(const u32 *rkey_enc,
                                u8 *dst, const u8 *src, u8 *iv,
                                unsigned int nbytes, u8 *ghash,
                                const u8 *ghash_table, const u8 *lengths))
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
        u8 __aligned(8) iv[SM4_BLOCK_SIZE];
        be128 __aligned(8) lengths;

        memset(ghash, 0, SM4_BLOCK_SIZE);

        lengths.a = cpu_to_be64(req->assoclen * 8);
        lengths.b = cpu_to_be64(walk->total * 8);

        memcpy(iv, req->iv, GCM_IV_SIZE);
        put_unaligned_be32(2, iv + GCM_IV_SIZE);

        scoped_ksimd() {
                if (req->assoclen)
                        gcm_calculate_auth_mac(req, ghash);

                do {
                        unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
                        const u8 *src = walk->src.virt.addr;
                        u8 *dst = walk->dst.virt.addr;
                        const u8 *l = NULL;

                        if (walk->nbytes == walk->total) {
                                l = (const u8 *)&lengths;
                                tail = 0;
                        }

                        sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
                                               walk->nbytes - tail, ghash,
                                               ctx->ghash_table, l);

                        err = skcipher_walk_done(walk, tail);
                } while (walk->nbytes);
        }
        return err;
}

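/* Encrypt, then append the (possibly truncated) tag after the ciphertext. */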
static int gcm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_aead_encrypt(&walk, req, false);
        err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_enc);
        if (err)
                return err;

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(ghash, req->dst, req->assoclen + req->cryptlen,
                                 crypto_aead_authsize(aead), 1);

        return 0;
}

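/*
 * Decrypt, then check the computed tag against the trailing tag in src
 * with a constant-time comparison (crypto_memneq()).
 */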
static int gcm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int authsize = crypto_aead_authsize(aead);
        u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
        u8 authtag[SM4_BLOCK_SIZE];
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_aead_decrypt(&walk, req, false);
        err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_dec);
        if (err)
                return err;

        /* compare calculated auth tag with the stored one */
        scatterwalk_map_and_copy(authtag, req->src,
                                 req->assoclen + req->cryptlen - authsize,
                                 authsize, 0);

        if (crypto_memneq(authtag, ghash, authsize))
                return -EBADMSG;

        return 0;
}

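/*
 * cra_blocksize is 1 because GCM turns SM4 into a stream mode; the
 * priority of 400 places this driver above generic software gcm(sm4).
 */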
static struct aead_alg sm4_gcm_alg = {
        .base = {
                .cra_name               = "gcm(sm4)",
                .cra_driver_name        = "gcm-sm4-ce",
                .cra_priority           = 400,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct sm4_gcm_ctx),
                .cra_module             = THIS_MODULE,
        },
        .ivsize         = GCM_IV_SIZE,
        .chunksize      = SM4_BLOCK_SIZE,
        .maxauthsize    = SM4_BLOCK_SIZE,
        .setkey         = gcm_setkey,
        .setauthsize    = gcm_setauthsize,
        .encrypt        = gcm_encrypt,
        .decrypt        = gcm_decrypt,
};

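/*
 * Sketch of how a kernel consumer might request this transform; the
 * variable names are illustrative, not part of this driver:
 *
 *      struct crypto_aead *tfm = crypto_alloc_aead("gcm(sm4)", 0, 0);
 *
 *      if (!IS_ERR(tfm)) {
 *              crypto_aead_setkey(tfm, key, SM4_KEY_SIZE);
 *              crypto_aead_setauthsize(tfm, 16);
 *              ...
 *              crypto_free_aead(tfm);
 *      }
 */
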
static int __init sm4_ce_gcm_init(void)
{
        if (!cpu_have_named_feature(PMULL))
                return -ENODEV;

        return crypto_register_aead(&sm4_gcm_alg);
}

static void __exit sm4_ce_gcm_exit(void)
{
        crypto_unregister_aead(&sm4_gcm_alg);
}

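/*
 * PMULL is required in addition to the SM4 extensions (and is checked
 * again at init time); exposing it in a cpu feature device table lets
 * userspace module autoloading account for the extra dependency.
 */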
static const struct cpu_feature __maybe_unused sm4_ce_gcm_cpu_feature[] = {
        { cpu_feature(PMULL) },
        {}
};
MODULE_DEVICE_TABLE(cpu, sm4_ce_gcm_cpu_feature);

module_cpu_feature_match(SM4, sm4_ce_gcm_init);
module_exit(sm4_ce_gcm_exit);

MODULE_DESCRIPTION("Synchronous SM4 in GCM mode using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("gcm(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");
263