xref: /linux/lib/crypto/x86/sha256.h (revision a8c60a9aca778d7fd22d6c9b1af702d6f952b87f)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SHA-256 optimized for x86_64
 *
 * Copyright 2025 Google LLC
 */
#include <asm/fpu/api.h>
#include <crypto/internal/simd.h>
#include <linux/static_call.h>
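
/*
 * The block-processing routine is chosen once at boot via a static call.
 * Until sha256_mod_init_arch() runs, calls go to the portable
 * sha256_blocks_generic() implementation.
 */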
DEFINE_STATIC_CALL(sha256_blocks_x86, sha256_blocks_generic);
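
/*
 * Declare the assembly block function asm_fn and define a C wrapper c_fn
 * around it.  When kernel-mode SIMD is usable, the wrapper brackets the
 * assembly routine with kernel_fpu_begin()/kernel_fpu_end(); otherwise it
 * falls back to the generic C implementation.
 */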
#define DEFINE_X86_SHA256_FN(c_fn, asm_fn)                                 \
	asmlinkage void asm_fn(struct sha256_block_state *state,           \
			       const u8 *data, size_t nblocks);            \
	static void c_fn(struct sha256_block_state *state, const u8 *data, \
			 size_t nblocks)                                   \
	{                                                                  \
		if (likely(crypto_simd_usable())) {                        \
			kernel_fpu_begin();                                \
			asm_fn(state, data, nblocks);                      \
			kernel_fpu_end();                                  \
		} else {                                                   \
			sha256_blocks_generic(state, data, nblocks);       \
		}                                                          \
	}
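
/* Wrappers for the SSSE3, AVX, AVX2 (RORX), and SHA-NI assembly routines. */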
DEFINE_X86_SHA256_FN(sha256_blocks_ssse3, sha256_transform_ssse3);
DEFINE_X86_SHA256_FN(sha256_blocks_avx, sha256_transform_avx);
DEFINE_X86_SHA256_FN(sha256_blocks_avx2, sha256_transform_rorx);
DEFINE_X86_SHA256_FN(sha256_blocks_ni, sha256_ni_transform);
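
/* Arch entry point: forward each batch of blocks through the static call. */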
static void sha256_blocks(struct sha256_block_state *state,
			  const u8 *data, size_t nblocks)
{
	static_call(sha256_blocks_x86)(state, data, nblocks);
}
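
/*
 * Illustrative sketch (not part of this header; the surrounding buffer names
 * are assumptions): once sha256_mod_init_arch() has run, library code
 * processes full 64-byte blocks by calling sha256_blocks(), and the static
 * call routes each batch to the implementation selected for the boot CPU:
 *
 *	struct sha256_block_state state;	// current chaining value
 *	const u8 *data = ...;			// nblocks * 64 bytes of input
 *	size_t nblocks = ...;
 *
 *	sha256_blocks(&state, data, nblocks);
 */

/*
 * Select the fastest available implementation for the boot CPU, preferring
 * SHA-NI, then AVX2 (with BMI2), then AVX, then SSSE3.  The AVX paths also
 * require the SSE and YMM xstate features to be enabled.  If nothing
 * matches, the static call keeps using sha256_blocks_generic().
 */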
#define sha256_mod_init_arch sha256_mod_init_arch
static inline void sha256_mod_init_arch(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI)) {
		static_call_update(sha256_blocks_x86, sha256_blocks_ni);
	} else if (cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				     NULL) &&
		   boot_cpu_has(X86_FEATURE_AVX)) {
		if (boot_cpu_has(X86_FEATURE_AVX2) &&
		    boot_cpu_has(X86_FEATURE_BMI2))
			static_call_update(sha256_blocks_x86,
					   sha256_blocks_avx2);
		else
			static_call_update(sha256_blocks_x86,
					   sha256_blocks_avx);
	} else if (boot_cpu_has(X86_FEATURE_SSSE3)) {
		static_call_update(sha256_blocks_x86, sha256_blocks_ssse3);
	}
}