/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4 Cipher Algorithm, using ARMv8 Crypto Extensions
 * as specified in
 * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
 *
 * Copyright (C) 2022, Alibaba Group.
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <asm/simd.h>
#include <crypto/b128ops.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sm4.h>
#include <crypto/utils.h>
#include <crypto/xts.h>
#include <linux/cpufeature.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

#define BYTES2BLKS(nbytes)	((nbytes) >> 4)

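/*
 * Low-level SM4 primitives, implemented in the accompanying ARMv8
 * Crypto Extensions assembly.  They use the NEON register file and are
 * therefore only called from within a kernel-mode SIMD section
 * (scoped_ksimd() below).
 */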
asmlinkage void sm4_ce_expand_key(const u8 *key, u32 *rkey_enc, u32 *rkey_dec,
				  const u32 *fk, const u32 *ck);
asmlinkage void sm4_ce_crypt_block(const u32 *rkey, u8 *dst, const u8 *src);
asmlinkage void sm4_ce_crypt(const u32 *rkey, u8 *dst, const u8 *src,
			     unsigned int nblks);
asmlinkage void sm4_ce_cbc_enc(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblocks);
asmlinkage void sm4_ce_cbc_dec(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblocks);
asmlinkage void sm4_ce_cbc_cts_enc(const u32 *rkey, u8 *dst, const u8 *src,
				   u8 *iv, unsigned int nbytes);
asmlinkage void sm4_ce_cbc_cts_dec(const u32 *rkey, u8 *dst, const u8 *src,
				   u8 *iv, unsigned int nbytes);
asmlinkage void sm4_ce_ctr_enc(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_xts_enc(const u32 *rkey1, u8 *dst, const u8 *src,
			       u8 *tweak, unsigned int nbytes,
			       const u32 *rkey2_enc);
asmlinkage void sm4_ce_xts_dec(const u32 *rkey1, u8 *dst, const u8 *src,
			       u8 *tweak, unsigned int nbytes,
			       const u32 *rkey2_enc);
asmlinkage void sm4_ce_mac_update(const u32 *rkey_enc, u8 *digest,
				  const u8 *src, unsigned int nblocks,
				  bool enc_before, bool enc_after);

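/* Exported for reuse by the other SM4-CE drivers (e.g. CCM/GCM). */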
EXPORT_SYMBOL(sm4_ce_expand_key);
EXPORT_SYMBOL(sm4_ce_crypt_block);
EXPORT_SYMBOL(sm4_ce_cbc_enc);

struct sm4_xts_ctx {
	struct sm4_ctx key1;
	struct sm4_ctx key2;
};

struct sm4_mac_tfm_ctx {
	struct sm4_ctx key;
	u8 __aligned(8) consts[];
};

struct sm4_mac_desc_ctx {
	u8 digest[SM4_BLOCK_SIZE];
};

static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	scoped_ksimd()
		sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
				  crypto_sm4_fk, crypto_sm4_ck);
	return 0;
}

static int sm4_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			  unsigned int key_len)
{
	struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	if (key_len != SM4_KEY_SIZE * 2)
		return -EINVAL;

	ret = xts_verify_key(tfm, key, key_len);
	if (ret)
		return ret;

	scoped_ksimd() {
		sm4_ce_expand_key(key, ctx->key1.rkey_enc,
				ctx->key1.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
		sm4_ce_expand_key(&key[SM4_KEY_SIZE], ctx->key2.rkey_enc,
				ctx->key2.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
	}

	return 0;
}

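/*
 * The skcipher helpers below all follow the same pattern: walk the
 * request's scatterlists with skcipher_walk_virt(), process as many
 * whole 16-byte blocks of each mapped span as possible inside a
 * kernel-mode SIMD section, and hand any remainder back to the walk
 * via skcipher_walk_done().
 */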
static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblks;

		scoped_ksimd() {
			nblks = BYTES2BLKS(nbytes);
			if (nblks) {
				sm4_ce_crypt(rkey, dst, src, nblks);
				nbytes -= nblks * SM4_BLOCK_SIZE;
			}
		}

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int sm4_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_enc);
}

static int sm4_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_ecb_do_crypt(req, ctx->rkey_dec);
}

static int sm4_cbc_crypt(struct skcipher_request *req,
			 struct sm4_ctx *ctx, bool encrypt)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblocks;

		nblocks = nbytes / SM4_BLOCK_SIZE;
		if (nblocks) {
			scoped_ksimd() {
				if (encrypt)
					sm4_ce_cbc_enc(ctx->rkey_enc, dst, src,
						       walk.iv, nblocks);
				else
					sm4_ce_cbc_dec(ctx->rkey_dec, dst, src,
						       walk.iv, nblocks);
			}
		}

		err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
	}

	return err;
}

static int sm4_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_cbc_crypt(req, ctx, true);
}

static int sm4_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

	return sm4_cbc_crypt(req, ctx, false);
}

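/*
 * CBC with ciphertext stealing ("cts(cbc(sm4))"): everything up to the
 * last two blocks is plain CBC; the final two blocks, of which the
 * last may be partial, are handled by the dedicated
 * sm4_ce_cbc_cts_{enc,dec} primitives.  A single-block request
 * degenerates to ordinary CBC.
 */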
static int sm4_cbc_cts_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct scatterlist *src = req->src;
	struct scatterlist *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int cbc_blocks;
	int err;

	if (req->cryptlen < SM4_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == SM4_BLOCK_SIZE)
		return sm4_cbc_crypt(req, ctx, encrypt);

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	/* handle the CBC cryption part */
	cbc_blocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;
	if (cbc_blocks) {
		skcipher_request_set_crypt(&subreq, src, dst,
					   cbc_blocks * SM4_BLOCK_SIZE,
					   req->iv);

		err = sm4_cbc_crypt(&subreq, ctx, encrypt);
		if (err)
			return err;

		dst = src = scatterwalk_ffwd(sg_src, src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * SM4_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	scoped_ksimd() {
		if (encrypt)
			sm4_ce_cbc_cts_enc(ctx->rkey_enc, walk.dst.virt.addr,
					   walk.src.virt.addr, walk.iv, walk.nbytes);
		else
			sm4_ce_cbc_cts_dec(ctx->rkey_dec, walk.dst.virt.addr,
					   walk.src.virt.addr, walk.iv, walk.nbytes);
	}

	return skcipher_walk_done(&walk, 0);
}

static int sm4_cbc_cts_encrypt(struct skcipher_request *req)
{
	return sm4_cbc_cts_crypt(req, true);
}

static int sm4_cbc_cts_decrypt(struct skcipher_request *req)
{
	return sm4_cbc_cts_crypt(req, false);
}

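/*
 * CTR mode: whole blocks go through the vectorized helper, which also
 * advances the counter carried in walk.iv.  A trailing partial block is
 * handled by encrypting the counter once and XORing the resulting
 * keystream into the remaining bytes.
 */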
static int sm4_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblks;

		scoped_ksimd() {
			nblks = BYTES2BLKS(nbytes);
			if (nblks) {
				sm4_ce_ctr_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
				dst += nblks * SM4_BLOCK_SIZE;
				src += nblks * SM4_BLOCK_SIZE;
				nbytes -= nblks * SM4_BLOCK_SIZE;
			}

			/* tail */
			if (walk.nbytes == walk.total && nbytes > 0) {
				u8 keystream[SM4_BLOCK_SIZE];

				sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
				crypto_inc(walk.iv, SM4_BLOCK_SIZE);
				crypto_xor_cpy(dst, src, keystream, nbytes);
				nbytes = 0;
			}
		}

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

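/*
 * XTS with ciphertext stealing: if the length is not a multiple of the
 * block size, the first walk is cut short so that the last full block
 * plus the partial tail can be processed in one final call.  rkey2_enc
 * is passed only on the first call, which turns the IV into the
 * initial tweak; later calls continue from the tweak carried in
 * walk.iv.
 */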
static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int tail = req->cryptlen % SM4_BLOCK_SIZE;
	const u32 *rkey2_enc = ctx->key2.rkey_enc;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	if (req->cryptlen < SM4_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int nblocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   nblocks * SM4_BLOCK_SIZE, req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false);
		if (err)
			return err;
	} else {
		tail = 0;
	}

	scoped_ksimd() {
		while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
			if (nbytes < walk.total)
				nbytes &= ~(SM4_BLOCK_SIZE - 1);

			if (encrypt)
				sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
						walk.src.virt.addr, walk.iv, nbytes,
						rkey2_enc);
			else
				sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
						walk.src.virt.addr, walk.iv, nbytes,
						rkey2_enc);

			rkey2_enc = NULL;

			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
			if (err)
				return err;
		}

		if (likely(tail == 0))
			return 0;

		/* handle ciphertext stealing */

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);

		skcipher_request_set_crypt(&subreq, src, dst,
					   SM4_BLOCK_SIZE + tail, req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false);
		if (err)
			return err;

		if (encrypt)
			sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
					walk.src.virt.addr, walk.iv, walk.nbytes,
					rkey2_enc);
		else
			sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
					walk.src.virt.addr, walk.iv, walk.nbytes,
					rkey2_enc);
	}

	return skcipher_walk_done(&walk, 0);
}

static int sm4_xts_encrypt(struct skcipher_request *req)
{
	return sm4_xts_crypt(req, true);
}

static int sm4_xts_decrypt(struct skcipher_request *req)
{
	return sm4_xts_crypt(req, false);
}

static struct skcipher_alg sm4_algs[] = {
	{
		.base = {
			.cra_name		= "ecb(sm4)",
			.cra_driver_name	= "ecb-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ecb_encrypt,
		.decrypt	= sm4_ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "cbc(sm4)",
			.cra_driver_name	= "cbc-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cbc_encrypt,
		.decrypt	= sm4_cbc_decrypt,
	}, {
		.base = {
			.cra_name		= "ctr(sm4)",
			.cra_driver_name	= "ctr-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.chunksize	= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ctr_crypt,
		.decrypt	= sm4_ctr_crypt,
	}, {
		.base = {
			.cra_name		= "cts(cbc(sm4))",
			.cra_driver_name	= "cts-cbc-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.walksize	= SM4_BLOCK_SIZE * 2,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cbc_cts_encrypt,
		.decrypt	= sm4_cbc_cts_decrypt,
	}, {
		.base = {
			.cra_name		= "xts(sm4)",
			.cra_driver_name	= "xts-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_xts_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE * 2,
		.max_keysize	= SM4_KEY_SIZE * 2,
		.ivsize		= SM4_BLOCK_SIZE,
		.walksize	= SM4_BLOCK_SIZE * 2,
		.setkey		= sm4_xts_setkey,
		.encrypt	= sm4_xts_encrypt,
		.decrypt	= sm4_xts_decrypt,
	}
};

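/*
 * MAC implementations.  cbcmac(sm4) uses the key as-is; cmac(sm4) and
 * xcbc(sm4) additionally derive per-mode constants, kept in the
 * consts[] flexible array behind struct sm4_mac_tfm_ctx.
 */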
static int sm4_cbcmac_setkey(struct crypto_shash *tfm, const u8 *key,
			     unsigned int key_len)
{
	struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	scoped_ksimd()
		sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
				crypto_sm4_fk, crypto_sm4_ck);
	return 0;
}

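/*
 * CMAC subkey derivation (NIST SP 800-38B): encrypt the all-zero
 * block, then multiply the result by x once and twice in GF(2^128)
 * using the reduction polynomial x^128 + x^7 + x^2 + x + 1, i.e. a
 * 1-bit left shift with a conditional XOR of 0x87 into the lowest
 * byte.
 */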
static int sm4_cmac_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int key_len)
{
	struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	u64 a, b;

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	memset(consts, 0, SM4_BLOCK_SIZE);

	scoped_ksimd() {
		sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
				crypto_sm4_fk, crypto_sm4_ck);

		/* encrypt the zero block */
		sm4_ce_crypt_block(ctx->key.rkey_enc, (u8 *)consts, (const u8 *)consts);
	}

	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
	a = be64_to_cpu(consts[0].a);
	b = be64_to_cpu(consts[0].b);
	consts[0].a = cpu_to_be64((a << 1) | (b >> 63));
	consts[0].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));

	a = be64_to_cpu(consts[0].a);
	b = be64_to_cpu(consts[0].b);
	consts[1].a = cpu_to_be64((a << 1) | (b >> 63));
	consts[1].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));

	return 0;
}

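/*
 * XCBC key derivation (as in RFC 3566 for AES-XCBC-MAC): K1, K2 and K3
 * are the encryptions of the constant blocks 0x01..01, 0x02..02 and
 * 0x03..03.  K1 replaces the cipher key; K2 and K3 become the
 * whole-block and partial-block finalization masks stored in
 * ctx->consts.
 */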
static int sm4_xcbc_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int key_len)
{
	struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	u8 __aligned(8) key2[SM4_BLOCK_SIZE];
	static u8 const ks[3][SM4_BLOCK_SIZE] = {
		{ [0 ... SM4_BLOCK_SIZE - 1] = 0x1},
		{ [0 ... SM4_BLOCK_SIZE - 1] = 0x2},
		{ [0 ... SM4_BLOCK_SIZE - 1] = 0x3},
	};

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	scoped_ksimd() {
		sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
				crypto_sm4_fk, crypto_sm4_ck);

		sm4_ce_crypt_block(ctx->key.rkey_enc, key2, ks[0]);
		sm4_ce_crypt(ctx->key.rkey_enc, ctx->consts, ks[1], 2);

		sm4_ce_expand_key(key2, ctx->key.rkey_enc, ctx->key.rkey_dec,
				crypto_sm4_fk, crypto_sm4_ck);
	}

	return 0;
}

static int sm4_mac_init(struct shash_desc *desc)
{
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->digest, 0, SM4_BLOCK_SIZE);
	return 0;
}

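/*
 * With CRYPTO_AHASH_ALG_BLOCK_ONLY the shash core buffers partial
 * blocks for us: update() absorbs whole blocks and returns the number
 * of leftover bytes for the core to carry over to the next call.
 */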
static int sm4_mac_update(struct shash_desc *desc, const u8 *p,
			  unsigned int len)
{
	struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int nblocks = len / SM4_BLOCK_SIZE;

	len %= SM4_BLOCK_SIZE;
	scoped_ksimd()
		sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, p,
				nblocks, false, true);
	return len;
}

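/*
 * CMAC/XCBC finalization: a complete final block is XORed with the
 * first derived constant; a partial one is padded with a single 0x80
 * byte and XORed with the second.  One more encryption then produces
 * the tag.
 */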
static int sm4_cmac_finup(struct shash_desc *desc, const u8 *src,
			  unsigned int len, u8 *out)
{
	struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
	const u8 *consts = tctx->consts;

	crypto_xor(ctx->digest, src, len);
	if (len != SM4_BLOCK_SIZE) {
		ctx->digest[len] ^= 0x80;
		consts += SM4_BLOCK_SIZE;
	}
	scoped_ksimd()
		sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, consts, 1,
				  false, true);
	memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
	return 0;
}

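/*
 * CBC-MAC finalization: any remaining bytes are XORed into the digest
 * (equivalent to zero-padding the final block) and encrypted one last
 * time.
 */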
static int sm4_cbcmac_finup(struct shash_desc *desc, const u8 *src,
			    unsigned int len, u8 *out)
{
	struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);

	if (len) {
		crypto_xor(ctx->digest, src, len);
		scoped_ksimd()
			sm4_ce_crypt_block(tctx->key.rkey_enc, ctx->digest,
					   ctx->digest);
	}
	memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
	return 0;
}

static struct shash_alg sm4_mac_algs[] = {
	{
		.base = {
			.cra_name		= "cmac(sm4)",
			.cra_driver_name	= "cmac-sm4-ce",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
						  CRYPTO_AHASH_ALG_FINAL_NONZERO,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_mac_tfm_ctx)
							+ SM4_BLOCK_SIZE * 2,
			.cra_module		= THIS_MODULE,
		},
		.digestsize	= SM4_BLOCK_SIZE,
		.init		= sm4_mac_init,
		.update		= sm4_mac_update,
		.finup		= sm4_cmac_finup,
		.setkey		= sm4_cmac_setkey,
		.descsize	= sizeof(struct sm4_mac_desc_ctx),
	}, {
		.base = {
			.cra_name		= "xcbc(sm4)",
			.cra_driver_name	= "xcbc-sm4-ce",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
						  CRYPTO_AHASH_ALG_FINAL_NONZERO,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_mac_tfm_ctx)
							+ SM4_BLOCK_SIZE * 2,
			.cra_module		= THIS_MODULE,
		},
		.digestsize	= SM4_BLOCK_SIZE,
		.init		= sm4_mac_init,
		.update		= sm4_mac_update,
		.finup		= sm4_cmac_finup,
		.setkey		= sm4_xcbc_setkey,
		.descsize	= sizeof(struct sm4_mac_desc_ctx),
	}, {
		.base = {
			.cra_name		= "cbcmac(sm4)",
			.cra_driver_name	= "cbcmac-sm4-ce",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_mac_tfm_ctx),
			.cra_module		= THIS_MODULE,
		},
		.digestsize	= SM4_BLOCK_SIZE,
		.init		= sm4_mac_init,
		.update		= sm4_mac_update,
		.finup		= sm4_cbcmac_finup,
		.setkey		= sm4_cbcmac_setkey,
		.descsize	= sizeof(struct sm4_mac_desc_ctx),
	}
};

static int __init sm4_init(void)
{
	int err;

	err = crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
	if (err)
		return err;

	err = crypto_register_shashes(sm4_mac_algs, ARRAY_SIZE(sm4_mac_algs));
	if (err)
		goto out_err;

	return 0;

out_err:
	crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
	return err;
}

static void __exit sm4_exit(void)
{
	crypto_unregister_shashes(sm4_mac_algs, ARRAY_SIZE(sm4_mac_algs));
	crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

module_cpu_feature_match(SM4, sm4_init);
module_exit(sm4_exit);

MODULE_DESCRIPTION("SM4 ECB/CBC/CTR/CTS/XTS and CMAC/XCBC/CBCMAC using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("sm4-ce");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("ecb(sm4)");
MODULE_ALIAS_CRYPTO("cbc(sm4)");
MODULE_ALIAS_CRYPTO("ctr(sm4)");
MODULE_ALIAS_CRYPTO("cts(cbc(sm4))");
MODULE_ALIAS_CRYPTO("xts(sm4)");
MODULE_ALIAS_CRYPTO("cmac(sm4)");
MODULE_ALIAS_CRYPTO("xcbc(sm4)");
MODULE_ALIAS_CRYPTO("cbcmac(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");