xref: /linux/lib/crypto/arm64/sha256.h (revision aec2f682d47c54ef434b2d440992626d80b1ebdc)
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3  * SHA-256 optimized for ARM64
4  *
5  * Copyright 2025 Google LLC
6  */
7 #include <asm/simd.h>
8 #include <linux/cpufeature.h>
9 
/*
 * CPU capability flags, decided once at module init (see
 * sha256_mod_init_arch()) and read-only afterwards:
 * have_neon: Advanced SIMD (ASIMD) is available.
 * have_ce:   SHA2 Crypto Extensions are available (implies have_neon here,
 *            since it is only enabled inside the ASIMD check).
 */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);

/* Scalar assembly implementation: no SIMD state needed, always safe. */
asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
					const u8 *data, size_t nblocks);
/* NEON assembly implementation: caller must hold kernel-mode SIMD. */
asmlinkage void sha256_block_neon(struct sha256_block_state *state,
				  const u8 *data, size_t nblocks);
/* SHA2 Crypto Extensions assembly: caller must hold kernel-mode SIMD. */
asmlinkage void sha256_ce_transform(struct sha256_block_state *state,
				    const u8 *data, size_t nblocks);
19 
20 static void sha256_blocks(struct sha256_block_state *state,
21 			  const u8 *data, size_t nblocks)
22 {
23 	if (static_branch_likely(&have_neon) && likely(may_use_simd())) {
24 		scoped_ksimd() {
25 			if (static_branch_likely(&have_ce))
26 				sha256_ce_transform(state, data, nblocks);
27 			else
28 				sha256_block_neon(state, data, nblocks);
29 		}
30 	} else {
31 		sha256_block_data_order(state, data, nblocks);
32 	}
33 }
34 
/*
 * The finup2x assembly hardcodes the offsets of these struct __sha256_ctx
 * fields; these asserts keep the C layout and the assembly in sync.
 */
static_assert(offsetof(struct __sha256_ctx, state) == 0);
static_assert(offsetof(struct __sha256_ctx, bytecount) == 32);
static_assert(offsetof(struct __sha256_ctx, buf) == 40);
/*
 * Interleaved two-message finup using the SHA2 Crypto Extensions: finishes
 * two equal-length messages that share the starting context @ctx.  Caller
 * must hold kernel-mode SIMD.  Note @len is int, hence the INT_MAX bound
 * enforced by sha256_finup_2x_arch().
 */
asmlinkage void sha256_ce_finup2x(const struct __sha256_ctx *ctx,
				  const u8 *data1, const u8 *data2, int len,
				  u8 out1[SHA256_DIGEST_SIZE],
				  u8 out2[SHA256_DIGEST_SIZE]);
42 
43 #define sha256_finup_2x_arch sha256_finup_2x_arch
44 static bool sha256_finup_2x_arch(const struct __sha256_ctx *ctx,
45 				 const u8 *data1, const u8 *data2, size_t len,
46 				 u8 out1[SHA256_DIGEST_SIZE],
47 				 u8 out2[SHA256_DIGEST_SIZE])
48 {
49 	/* The assembly requires len >= SHA256_BLOCK_SIZE && len <= INT_MAX. */
50 	if (static_branch_likely(&have_ce) && len >= SHA256_BLOCK_SIZE &&
51 	    len <= INT_MAX && likely(may_use_simd())) {
52 		scoped_ksimd()
53 			sha256_ce_finup2x(ctx, data1, data2, len, out1, out2);
54 		kmsan_unpoison_memory(out1, SHA256_DIGEST_SIZE);
55 		kmsan_unpoison_memory(out2, SHA256_DIGEST_SIZE);
56 		return true;
57 	}
58 	return false;
59 }
60 
/*
 * Report whether sha256_finup_2x_arch() can actually take its accelerated
 * path on this CPU, i.e. the SHA2 Crypto Extensions are present.
 */
static bool sha256_finup_2x_is_optimized_arch(void)
{
	return static_key_enabled(&have_ce);
}
65 
66 #define sha256_mod_init_arch sha256_mod_init_arch
67 static void sha256_mod_init_arch(void)
68 {
69 	if (cpu_have_named_feature(ASIMD)) {
70 		static_branch_enable(&have_neon);
71 		if (cpu_have_named_feature(SHA2))
72 			static_branch_enable(&have_ce);
73 	}
74 }
75