xref: /linux/arch/x86/crypto/sm3_avx_glue.c (revision bca5cfbb694d66a1c482d0c347eee80f6afbc870)
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3  * SM3 Secure Hash Algorithm, AVX assembler accelerated.
4  * specified in: https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02
5  *
6  * Copyright (C) 2021 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
7  */
8 
9 #define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt
10 
11 #include <crypto/internal/hash.h>
12 #include <crypto/internal/simd.h>
13 #include <crypto/sm3.h>
14 #include <crypto/sm3_base.h>
15 #include <linux/cpufeature.h>
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 
/*
 * AVX/BMI2 assembly routine: consumes @nblocks SM3 blocks from @data and
 * folds them into the digest at the start of @state.  Must only be called
 * between kernel_fpu_begin()/kernel_fpu_end().
 */
asmlinkage void sm3_transform_avx(struct sm3_state *state,
			const u8 *data, int nblocks);
21 
22 static int sm3_avx_update(struct shash_desc *desc, const u8 *data,
23 			 unsigned int len)
24 {
25 	int remain;
26 
27 	/*
28 	 * Make sure struct sm3_state begins directly with the SM3
29 	 * 256-bit internal state, as this is what the asm functions expect.
30 	 */
31 	BUILD_BUG_ON(offsetof(struct sm3_state, state) != 0);
32 
33 	kernel_fpu_begin();
34 	remain = sm3_base_do_update_blocks(desc, data, len, sm3_transform_avx);
35 	kernel_fpu_end();
36 	return remain;
37 }
38 
39 static int sm3_avx_finup(struct shash_desc *desc, const u8 *data,
40 		      unsigned int len, u8 *out)
41 {
42 	kernel_fpu_begin();
43 	sm3_base_do_finup(desc, data, len, sm3_transform_avx);
44 	kernel_fpu_end();
45 	return sm3_base_finish(desc, out);
46 }
47 
/* shash algorithm descriptor for the AVX-accelerated SM3 implementation. */
static struct shash_alg sm3_avx_alg = {
	.digestsize	=	SM3_DIGEST_SIZE,
	.init		=	sm3_base_init,
	.update		=	sm3_avx_update,
	.finup		=	sm3_avx_finup,
	.descsize	=	SM3_STATE_SIZE,
	.base		=	{
		.cra_name	=	"sm3",
		.cra_driver_name =	"sm3-avx",
		/* Priority 300: preferred over the generic C implementation. */
		.cra_priority	=	300,
		.cra_flags	 =	CRYPTO_AHASH_ALG_BLOCK_ONLY |
					CRYPTO_AHASH_ALG_FINUP_MAX,
		.cra_blocksize	=	SM3_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
64 
65 static int __init sm3_avx_mod_init(void)
66 {
67 	const char *feature_name;
68 
69 	if (!boot_cpu_has(X86_FEATURE_AVX)) {
70 		pr_info("AVX instruction are not detected.\n");
71 		return -ENODEV;
72 	}
73 
74 	if (!boot_cpu_has(X86_FEATURE_BMI2)) {
75 		pr_info("BMI2 instruction are not detected.\n");
76 		return -ENODEV;
77 	}
78 
79 	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
80 				&feature_name)) {
81 		pr_info("CPU feature '%s' is not supported.\n", feature_name);
82 		return -ENODEV;
83 	}
84 
85 	return crypto_register_shash(&sm3_avx_alg);
86 }
87 
/* Unregister the algorithm on module unload. */
static void __exit sm3_avx_mod_exit(void)
{
	crypto_unregister_shash(&sm3_avx_alg);
}
92 
/* Module registration and metadata. */
module_init(sm3_avx_mod_init);
module_exit(sm3_avx_mod_exit);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_DESCRIPTION("SM3 Secure Hash Algorithm, AVX assembler accelerated");
MODULE_ALIAS_CRYPTO("sm3");
MODULE_ALIAS_CRYPTO("sm3-avx");
101