/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SHA-256 optimized for x86_64
 *
 * Copyright 2025 Google LLC
 */
#include <asm/fpu/api.h>
#include <linux/static_call.h>

/*
 * Static call used to dispatch to the fastest available SHA-256 block
 * function.  It defaults to the generic C implementation and is updated
 * once at init time based on the CPU features that are present.
 */
DEFINE_STATIC_CALL(sha256_blocks_x86, sha256_blocks_generic);

/*
 * Define a C wrapper around an assembly block function.  The assembly is
 * used only when the kernel-mode FPU is usable in the current context;
 * otherwise the wrapper falls back to the generic C implementation.
 */
#define DEFINE_X86_SHA256_FN(c_fn, asm_fn)				    \
	asmlinkage void asm_fn(struct sha256_block_state *state,	    \
			       const u8 *data, size_t nblocks);		    \
	static void c_fn(struct sha256_block_state *state, const u8 *data, \
			 size_t nblocks)				    \
	{								    \
		if (likely(irq_fpu_usable())) {				    \
			kernel_fpu_begin();				    \
			asm_fn(state, data, nblocks);			    \
			kernel_fpu_end();				    \
		} else {						    \
			sha256_blocks_generic(state, data, nblocks);	    \
		}							    \
	}

DEFINE_X86_SHA256_FN(sha256_blocks_ssse3, sha256_transform_ssse3);
DEFINE_X86_SHA256_FN(sha256_blocks_avx, sha256_transform_avx);
DEFINE_X86_SHA256_FN(sha256_blocks_avx2, sha256_transform_rorx);
DEFINE_X86_SHA256_FN(sha256_blocks_ni, sha256_ni_transform);

static void sha256_blocks(struct sha256_block_state *state,
			  const u8 *data, size_t nblocks)
{
	static_call(sha256_blocks_x86)(state, data, nblocks);
}

/*
 * Select the best implementation at init time: SHA-NI if available,
 * otherwise AVX2 (with BMI2), AVX, or SSSE3, in that order of preference.
 * The AVX paths additionally require OS support for saving the SSE and
 * YMM register state.
 */
#define sha256_mod_init_arch sha256_mod_init_arch
static inline void sha256_mod_init_arch(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI)) {
		static_call_update(sha256_blocks_x86, sha256_blocks_ni);
	} else if (cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				     NULL) &&
		   boot_cpu_has(X86_FEATURE_AVX)) {
		if (boot_cpu_has(X86_FEATURE_AVX2) &&
		    boot_cpu_has(X86_FEATURE_BMI2))
			static_call_update(sha256_blocks_x86,
					   sha256_blocks_avx2);
		else
			static_call_update(sha256_blocks_x86,
					   sha256_blocks_avx);
	} else if (boot_cpu_has(X86_FEATURE_SSSE3)) {
		static_call_update(sha256_blocks_x86, sha256_blocks_ssse3);
	}
}
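
/*
 * Usage sketch (illustration only, compiled out by default): a minimal
 * module that hashes a short buffer through the generic sha256() helper
 * from <crypto/sha2.h>.  On x86 the block processing lands in whichever
 * routine sha256_mod_init_arch() installed in the static call above.  The
 * demo names (SHA256_X86_USAGE_SKETCH, sha256_x86_demo_*) are hypothetical;
 * the sha256() helper and SHA256_DIGEST_SIZE are assumed from the
 * lib/crypto API.
 */
#ifdef SHA256_X86_USAGE_SKETCH
#include <linux/module.h>
#include <crypto/sha2.h>

static int __init sha256_x86_demo_init(void)
{
	static const u8 msg[] = "abc";
	u8 digest[SHA256_DIGEST_SIZE];

	/* Hash the 3-byte message; SHA-256("abc") begins with 0xba. */
	sha256(msg, 3, digest);
	pr_info("sha256(\"abc\")[0] = %02x\n", digest[0]);
	return 0;
}
module_init(sha256_x86_demo_init);

static void __exit sha256_x86_demo_exit(void)
{
}
module_exit(sha256_x86_demo_exit);

MODULE_DESCRIPTION("SHA-256 x86 dispatch usage sketch");
MODULE_LICENSE("GPL");
#endif /* SHA256_X86_USAGE_SKETCH */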