xref: /linux/lib/crypto/x86/sha256.h (revision 13150742b09e720fdf021de14cd2b98b37415a89)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SHA-256 optimized for x86_64
 *
 * Copyright 2025 Google LLC
 */
#include <asm/fpu/api.h>
#include <crypto/internal/simd.h>
#include <linux/static_call.h>

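/*
 * Dispatch point for the SHA-256 block function.  It defaults to the generic
 * C implementation and is retargeted at boot by sha256_mod_init_arch() once
 * the CPU's SIMD features are known.
 */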
DEFINE_STATIC_CALL(sha256_blocks_x86, sha256_blocks_generic);

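/*
 * Define a C wrapper, c_fn(), around the assembly routine asm_fn().  The
 * wrapper calls the assembly only when SIMD is usable in the current context
 * (kernel-mode FPU available); otherwise it falls back to the generic C
 * implementation.
 */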
#define DEFINE_X86_SHA256_FN(c_fn, asm_fn)                                 \
	asmlinkage void asm_fn(struct sha256_block_state *state,           \
			       const u8 *data, size_t nblocks);            \
	static void c_fn(struct sha256_block_state *state, const u8 *data, \
			 size_t nblocks)                                   \
	{                                                                  \
		if (likely(crypto_simd_usable())) {                        \
			kernel_fpu_begin();                                \
			asm_fn(state, data, nblocks);                      \
			kernel_fpu_end();                                  \
		} else {                                                   \
			sha256_blocks_generic(state, data, nblocks);       \
		}                                                          \
	}

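/*
 * Instantiate one wrapper per assembly implementation: SSSE3, AVX, AVX2
 * (using RORX), and SHA-NI.  For example, the first invocation below declares
 * sha256_transform_ssse3() and defines sha256_blocks_ssse3(), which calls it
 * between kernel_fpu_begin() and kernel_fpu_end().
 */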
DEFINE_X86_SHA256_FN(sha256_blocks_ssse3, sha256_transform_ssse3);
DEFINE_X86_SHA256_FN(sha256_blocks_avx, sha256_transform_avx);
DEFINE_X86_SHA256_FN(sha256_blocks_avx2, sha256_transform_rorx);
DEFINE_X86_SHA256_FN(sha256_blocks_ni, sha256_ni_transform);

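/* Process a sequence of 64-byte SHA-256 blocks via the static call. */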
static void sha256_blocks(struct sha256_block_state *state,
			  const u8 *data, size_t nblocks)
{
	static_call(sha256_blocks_x86)(state, data, nblocks);
}

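/*
 * At boot, pick the best implementation the CPU supports, in decreasing order
 * of preference: SHA-NI, then AVX2 (with BMI2 for RORX), then AVX, then SSSE3.
 * The AVX paths also require the SSE and YMM xstate features to be enabled.
 */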
#define sha256_mod_init_arch sha256_mod_init_arch
static inline void sha256_mod_init_arch(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI)) {
		static_call_update(sha256_blocks_x86, sha256_blocks_ni);
	} else if (cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				     NULL) &&
		   boot_cpu_has(X86_FEATURE_AVX)) {
		if (boot_cpu_has(X86_FEATURE_AVX2) &&
		    boot_cpu_has(X86_FEATURE_BMI2))
			static_call_update(sha256_blocks_x86,
					   sha256_blocks_avx2);
		else
			static_call_update(sha256_blocks_x86,
					   sha256_blocks_avx);
	} else if (boot_cpu_has(X86_FEATURE_SSSE3)) {
		static_call_update(sha256_blocks_x86, sha256_blocks_ssse3);
	}
}