/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4-CCM AEAD Algorithm using ARMv8 Crypto Extensions
 * as specified in RFC 8998
 * https://datatracker.ietf.org/doc/html/rfc8998
 *
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-ce.h"

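/*
 * Low-level primitives implemented in assembly using the ARMv8 Crypto
 * Extensions SM4 instructions: CBC-MAC over whole blocks, combined CTR
 * encryption/decryption plus MAC update, and the final tag encryption
 * with the initial counter block.
 */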
asmlinkage void sm4_ce_cbcmac_update(const u32 *rkey_enc, u8 *mac,
				     const u8 *src, unsigned int nblocks);
asmlinkage void sm4_ce_ccm_enc(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_dec(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_final(const u32 *rkey_enc, u8 *iv, u8 *mac);


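/*
 * Expand the 128-bit SM4 key into encryption and decryption round keys.
 * The expansion itself uses the SM4 CE instructions, so it runs inside a
 * kernel-mode SIMD section.
 */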
static int ccm_setkey(struct crypto_aead *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_aead_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	scoped_ksimd()
		sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
				  crypto_sm4_fk, crypto_sm4_ck);

	return 0;
}

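/*
 * CCM allows tag lengths of 4, 6, 8, 10, 12, 14 or 16 bytes; reject odd
 * sizes and anything shorter than 4 bytes (the upper bound is enforced
 * via .maxauthsize).
 */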
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

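/*
 * Build the B_0 block (RFC 3610 section 2.2 / NIST SP 800-38C): the flags
 * byte encodes the Adata bit, the tag length M and the length-field width
 * L, followed by the nonce and the message length in the trailing L bytes.
 * The caller supplies the IV as [L-1 | nonce | counter]; the counter
 * portion is zeroed here so the same buffer can be reused as the initial
 * CTR block.
 */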
static int ccm_format_input(u8 info[], struct aead_request *req,
			    unsigned int msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int l = req->iv[0] + 1;
	unsigned int m;
	__be32 len;

	/* verify that the CCM length field width 'L' is in range: 2 <= L <= 8 */
	if (l < 2 || l > 8)
		return -EINVAL;
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	memset(&req->iv[SM4_BLOCK_SIZE - l], 0, l);

	memcpy(info, req->iv, SM4_BLOCK_SIZE);

	m = crypto_aead_authsize(aead);

	/* format flags field per RFC 3610/NIST 800-38C */
	*info |= ((m - 2) / 2) << 3;
	if (req->assoclen)
		*info |= (1 << 6);

	/*
	 * Format the message length field.  Linux represents msglen as a
	 * u32, so at most the low four bytes are ever non-zero even when
	 * L is larger than 4.
	 */
	if (l >= 4)
		l = 4;

	len = cpu_to_be32(msglen);
	memcpy(&info[SM4_BLOCK_SIZE - l], (u8 *)&len + 4 - l, l);

	return 0;
}

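/*
 * Fold the associated data into the CBC-MAC.  'mac' holds B_0 on entry;
 * it is encrypted first, then the encoded AAD length and the AAD itself
 * are XORed in block by block, using the assembly helper for runs of
 * whole blocks.
 */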
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; } aadlen;
	u32 assoclen = req->assoclen;
	struct scatter_walk walk;
	unsigned int len;

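	/*
	 * Encode the AAD length per RFC 3610 section 2.2: lengths below
	 * 0xff00 use a 2-byte encoding, larger ones use the 0xfffe marker
	 * followed by a 4-byte big-endian length.
	 */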
	if (assoclen < 0xff00) {
		aadlen.l = cpu_to_be16(assoclen);
		len = 2;
	} else {
		aadlen.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(assoclen, &aadlen.h);
		len = 6;
	}

	sm4_ce_crypt_block(ctx->rkey_enc, mac, mac);
	crypto_xor(mac, (const u8 *)&aadlen, len);

	scatterwalk_start(&walk, req->src);

	do {
		unsigned int n, orig_n;
		const u8 *p;

		orig_n = scatterwalk_next(&walk, assoclen);
		p = walk.addr;
		n = orig_n;

		while (n > 0) {
			unsigned int l, nblocks;

			if (len == SM4_BLOCK_SIZE) {
				if (n < SM4_BLOCK_SIZE) {
					sm4_ce_crypt_block(ctx->rkey_enc,
							   mac, mac);

					len = 0;
				} else {
					nblocks = n / SM4_BLOCK_SIZE;
					sm4_ce_cbcmac_update(ctx->rkey_enc,
							     mac, p, nblocks);

					p += nblocks * SM4_BLOCK_SIZE;
					n %= SM4_BLOCK_SIZE;

					continue;
				}
			}

			l = min(n, SM4_BLOCK_SIZE - len);
			if (l) {
				crypto_xor(mac + len, p, l);
				len += l;
				p += l;
				n -= l;
			}
		}

		scatterwalk_done_src(&walk, orig_n);
		assoclen -= orig_n;
	} while (assoclen);
}

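/*
 * Common CTR walk for encryption and decryption.  The caller passes the
 * block-level routine (sm4_ce_ccm_enc or sm4_ce_ccm_dec), which advances
 * the counter, transforms the data and updates the CBC-MAC in one pass;
 * the tag is produced at the end by encrypting the MAC with the initial
 * counter block ctr0.
 */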
static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
		     u32 *rkey_enc, u8 mac[],
		     void (*sm4_ce_ccm_crypt)(const u32 *rkey_enc, u8 *dst,
					const u8 *src, u8 *iv,
					unsigned int nbytes, u8 *mac))
{
	u8 __aligned(8) ctr0[SM4_BLOCK_SIZE];
	int err = 0;

	/* preserve the initial counter block ctr0 for encrypting the tag */
	memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE);
	crypto_inc(walk->iv, SM4_BLOCK_SIZE);

	scoped_ksimd() {
		if (req->assoclen)
			ccm_calculate_auth_mac(req, mac);

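		/*
		 * Process whole blocks in each walk step; the partial tail
		 * is only handed to the assembly on the final step (when
		 * this step covers the remaining total), otherwise it is
		 * returned to the walk for the next iteration.
		 */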
		while (walk->nbytes) {
			unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;

			if (walk->nbytes == walk->total)
				tail = 0;

			sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
					 walk->src.virt.addr, walk->iv,
					 walk->nbytes - tail, mac);

			err = skcipher_walk_done(walk, tail);
		}
		sm4_ce_ccm_final(rkey_enc, ctr0, mac);
	}

	return err;
}

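/*
 * Encrypt: MAC the AAD and plaintext while encrypting in CTR mode, then
 * append the (possibly truncated) tag after the ciphertext in dst.
 */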
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen);
	if (err)
		return err;

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_enc);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

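/*
 * Decrypt: recompute the tag over the AAD and decrypted plaintext, then
 * compare it in constant time against the tag stored at the end of src.
 * Note that req->cryptlen includes the tag on the decrypt path.
 */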
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	u8 authtag[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen - authsize);
	if (err)
		return err;

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_dec);
	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(authtag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(authtag, mac, authsize))
		return -EBADMSG;

	return 0;
}

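/*
 * Example (illustrative sketch, not part of this driver): a kernel user
 * would reach this implementation through the generic AEAD API by
 * allocating "ccm(sm4)".  Error handling and buffer setup are omitted;
 * the scatterlist 'sg' covering assoclen bytes of AAD followed by the
 * plaintext (plus room for the tag) is assumed to be prepared by the
 * caller.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(sm4)", 0, 0);
 *	struct aead_request *req;
 *
 *	crypto_aead_setkey(tfm, key, SM4_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, 16);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, sg, sg, plaintext_len, iv);
 *	crypto_aead_encrypt(req);
 */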
static struct aead_alg sm4_ccm_alg = {
	.base = {
		.cra_name		= "ccm(sm4)",
		.cra_driver_name	= "ccm-sm4-ce",
		.cra_priority		= 400,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct sm4_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= SM4_BLOCK_SIZE,
	.chunksize	= SM4_BLOCK_SIZE,
	.maxauthsize	= SM4_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

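/*
 * Register the algorithm only on CPUs that advertise the SM4 Crypto
 * Extension (see module_cpu_feature_match() below).
 */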
static int __init sm4_ce_ccm_init(void)
{
	return crypto_register_aead(&sm4_ccm_alg);
}

static void __exit sm4_ce_ccm_exit(void)
{
	crypto_unregister_aead(&sm4_ccm_alg);
}

module_cpu_feature_match(SM4, sm4_ce_ccm_init);
module_exit(sm4_ce_ccm_exit);

MODULE_DESCRIPTION("Synchronous SM4 in CCM mode using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("ccm(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");