/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * arm32-optimized SHA-512 block function
 *
 * Copyright 2025 Google LLC
 */

#include <asm/neon.h>
#include <crypto/internal/simd.h>

/* Static key, enabled at init time if the CPU supports NEON. */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);

/* Assembly implementations: generic scalar and NEON-accelerated. */
asmlinkage void sha512_block_data_order(struct sha512_block_state *state,
					const u8 *data, size_t nblocks);
asmlinkage void sha512_block_data_order_neon(struct sha512_block_state *state,
					     const u8 *data, size_t nblocks);

static void sha512_blocks(struct sha512_block_state *state,
			  const u8 *data, size_t nblocks)
{
	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
	    static_branch_likely(&have_neon) && likely(crypto_simd_usable())) {
		/*
		 * kernel_neon_begin()/end() make the NEON register file
		 * available to kernel code and restore it afterwards.
		 */
		kernel_neon_begin();
		sha512_block_data_order_neon(state, data, nblocks);
		kernel_neon_end();
	} else {
		/* Fall back to the scalar implementation. */
		sha512_block_data_order(state, data, nblocks);
	}
}

#ifdef CONFIG_KERNEL_MODE_NEON
#define sha512_mod_init_arch sha512_mod_init_arch
static inline void sha512_mod_init_arch(void)
{
	/* Enable the NEON code path if this CPU has NEON. */
	if (cpu_has_neon())
		static_branch_enable(&have_neon);
}
#endif /* CONFIG_KERNEL_MODE_NEON */