/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SHA-256 optimized for ARM
 *
 * Copyright 2025 Google LLC
 */
#include <asm/neon.h>
#include <crypto/internal/simd.h>

/*
 * Assembly implementations of the SHA-256 block function:
 * generic scalar, NEON, and ARMv8 Crypto Extensions variants.
 */
asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
					const u8 *data, size_t nblocks);
asmlinkage void sha256_block_data_order_neon(struct sha256_block_state *state,
					     const u8 *data, size_t nblocks);
asmlinkage void sha256_ce_transform(struct sha256_block_state *state,
				    const u8 *data, size_t nblocks);

/* CPU feature flags, set once at init time and read-only afterwards */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);

static void sha256_blocks(struct sha256_block_state *state,
			  const u8 *data, size_t nblocks)
{
	/*
	 * Prefer the Crypto Extensions implementation, then NEON, but only
	 * when NEON is supported and usable in the current context; fall
	 * back to the scalar implementation otherwise.
	 */
	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
	    static_branch_likely(&have_neon) && crypto_simd_usable()) {
		kernel_neon_begin();
		if (static_branch_likely(&have_ce))
			sha256_ce_transform(state, data, nblocks);
		else
			sha256_block_data_order_neon(state, data, nblocks);
		kernel_neon_end();
	} else {
		sha256_block_data_order(state, data, nblocks);
	}
}

#ifdef CONFIG_KERNEL_MODE_NEON
#define sha256_mod_init_arch sha256_mod_init_arch
/* Enable the NEON and Crypto Extensions code paths based on ELF hwcaps */
static inline void sha256_mod_init_arch(void)
{
	if (elf_hwcap & HWCAP_NEON) {
		static_branch_enable(&have_neon);
		if (elf_hwcap2 & HWCAP2_SHA2)
			static_branch_enable(&have_ce);
	}
}
#endif /* CONFIG_KERNEL_MODE_NEON */