xref: /linux/lib/crypto/x86/sha512.h (revision 13150742b09e720fdf021de14cd2b98b37415a89)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * x86-optimized SHA-512 block function
 *
 * Copyright 2025 Google LLC
 */
7*484c1811SEric Biggers 
8*484c1811SEric Biggers #include <asm/fpu/api.h>
9*484c1811SEric Biggers #include <crypto/internal/simd.h>
10*484c1811SEric Biggers #include <linux/static_call.h>
11*484c1811SEric Biggers 
12*484c1811SEric Biggers DEFINE_STATIC_CALL(sha512_blocks_x86, sha512_blocks_generic);
13*484c1811SEric Biggers 
/*
 * DEFINE_X86_SHA512_FN - declare an assembly SHA-512 block function and
 * define a C wrapper around it.
 * @c_fn:	name of the static C wrapper function to define
 * @asm_fn:	name of the asmlinkage assembly implementation to declare
 *
 * The wrapper calls @asm_fn inside a kernel_fpu_begin()/kernel_fpu_end()
 * section when SIMD registers are currently usable; otherwise it falls
 * back to the generic C implementation.
 */
#define DEFINE_X86_SHA512_FN(c_fn, asm_fn)                                 \
	asmlinkage void asm_fn(struct sha512_block_state *state,           \
			       const u8 *data, size_t nblocks);            \
	static void c_fn(struct sha512_block_state *state, const u8 *data, \
			 size_t nblocks)                                   \
	{                                                                  \
		if (likely(crypto_simd_usable())) {                        \
			kernel_fpu_begin();                                \
			asm_fn(state, data, nblocks);                      \
			kernel_fpu_end();                                  \
		} else {                                                   \
			sha512_blocks_generic(state, data, nblocks);       \
		}                                                          \
	}
28*484c1811SEric Biggers 
/* One SIMD-guarded C wrapper per assembly implementation. */
DEFINE_X86_SHA512_FN(sha512_blocks_ssse3, sha512_transform_ssse3);
DEFINE_X86_SHA512_FN(sha512_blocks_avx, sha512_transform_avx);
DEFINE_X86_SHA512_FN(sha512_blocks_avx2, sha512_transform_rorx);
32*484c1811SEric Biggers 
/*
 * Process @nblocks SHA-512 blocks from @data, updating @state, using
 * whichever implementation sha512_mod_init_arch() selected.
 */
static void sha512_blocks(struct sha512_block_state *state,
			  const u8 *data, size_t nblocks)
{
	static_call(sha512_blocks_x86)(state, data, nblocks);
}
38*484c1811SEric Biggers 
39*484c1811SEric Biggers #define sha512_mod_init_arch sha512_mod_init_arch
sha512_mod_init_arch(void)40*484c1811SEric Biggers static inline void sha512_mod_init_arch(void)
41*484c1811SEric Biggers {
42*484c1811SEric Biggers 	if (cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL) &&
43*484c1811SEric Biggers 	    boot_cpu_has(X86_FEATURE_AVX)) {
44*484c1811SEric Biggers 		if (boot_cpu_has(X86_FEATURE_AVX2) &&
45*484c1811SEric Biggers 		    boot_cpu_has(X86_FEATURE_BMI2))
46*484c1811SEric Biggers 			static_call_update(sha512_blocks_x86,
47*484c1811SEric Biggers 					   sha512_blocks_avx2);
48*484c1811SEric Biggers 		else
49*484c1811SEric Biggers 			static_call_update(sha512_blocks_x86,
50*484c1811SEric Biggers 					   sha512_blocks_avx);
51*484c1811SEric Biggers 	} else if (boot_cpu_has(X86_FEATURE_SSSE3)) {
52*484c1811SEric Biggers 		static_call_update(sha512_blocks_x86, sha512_blocks_ssse3);
53*484c1811SEric Biggers 	}
54*484c1811SEric Biggers }
55