xref: /linux/lib/crypto/arm64/sha256.h (revision 13150742b09e720fdf021de14cd2b98b37415a89)
1*e96cb950SEric Biggers /* SPDX-License-Identifier: GPL-2.0-or-later */
2*e96cb950SEric Biggers /*
3*e96cb950SEric Biggers  * SHA-256 optimized for ARM64
4*e96cb950SEric Biggers  *
5*e96cb950SEric Biggers  * Copyright 2025 Google LLC
6*e96cb950SEric Biggers  */
7*e96cb950SEric Biggers #include <asm/neon.h>
8*e96cb950SEric Biggers #include <crypto/internal/simd.h>
9*e96cb950SEric Biggers #include <linux/cpufeature.h>
10*e96cb950SEric Biggers 
11*e96cb950SEric Biggers asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
12*e96cb950SEric Biggers 					const u8 *data, size_t nblocks);
13*e96cb950SEric Biggers asmlinkage void sha256_block_neon(struct sha256_block_state *state,
14*e96cb950SEric Biggers 				  const u8 *data, size_t nblocks);
15*e96cb950SEric Biggers asmlinkage size_t __sha256_ce_transform(struct sha256_block_state *state,
16*e96cb950SEric Biggers 					const u8 *data, size_t nblocks);
17*e96cb950SEric Biggers 
18*e96cb950SEric Biggers static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
19*e96cb950SEric Biggers static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
20*e96cb950SEric Biggers 
/*
 * Process @nblocks SHA-256 blocks from @data, updating @state in place.
 *
 * Picks the fastest available implementation: the SHA-2 Crypto Extensions
 * code when present, the plain NEON code otherwise, and the generic scalar
 * code when NEON is unavailable or may not be used in this context.
 */
static void sha256_blocks(struct sha256_block_state *state,
			  const u8 *data, size_t nblocks)
{
	/* Fall back to the scalar code if NEON cannot be used right now. */
	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) ||
	    !static_branch_likely(&have_neon) || !crypto_simd_usable()) {
		sha256_block_data_order(state, data, nblocks);
		return;
	}

	if (!static_branch_likely(&have_ce)) {
		kernel_neon_begin();
		sha256_block_neon(state, data, nblocks);
		kernel_neon_end();
		return;
	}

	/*
	 * The CE transform may return early with blocks still remaining.
	 * Re-enter the NEON region for each batch so that the remainder
	 * is processed; this also bounds the time spent with preemption
	 * implicitly disabled inside kernel_neon_begin()/end().
	 */
	do {
		size_t remaining;

		kernel_neon_begin();
		remaining = __sha256_ce_transform(state, data, nblocks);
		kernel_neon_end();
		data += (nblocks - remaining) * SHA256_BLOCK_SIZE;
		nblocks = remaining;
	} while (nblocks);
}
46*e96cb950SEric Biggers 
47*e96cb950SEric Biggers #ifdef CONFIG_KERNEL_MODE_NEON
48*e96cb950SEric Biggers #define sha256_mod_init_arch sha256_mod_init_arch
sha256_mod_init_arch(void)49*e96cb950SEric Biggers static inline void sha256_mod_init_arch(void)
50*e96cb950SEric Biggers {
51*e96cb950SEric Biggers 	if (cpu_have_named_feature(ASIMD)) {
52*e96cb950SEric Biggers 		static_branch_enable(&have_neon);
53*e96cb950SEric Biggers 		if (cpu_have_named_feature(SHA2))
54*e96cb950SEric Biggers 			static_branch_enable(&have_ce);
55*e96cb950SEric Biggers 	}
56*e96cb950SEric Biggers }
57*e96cb950SEric Biggers #endif /* CONFIG_KERNEL_MODE_NEON */
58