xref: /linux/lib/crypto/arm64/sha256.h (revision 7fc2cd2e4b398c57c9cf961cfea05eadbf34c05c)
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3  * SHA-256 optimized for ARM64
4  *
5  * Copyright 2025 Google LLC
6  */
7 #include <asm/simd.h>
8 #include <linux/cpufeature.h>
9 
/* Enabled at init when the CPU has Advanced SIMD (NEON) */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
/* Enabled at init when the CPU also has the SHA2 Crypto Extensions */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
12 
/* Generic scalar implementation; usable without kernel-mode SIMD */
asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
					const u8 *data, size_t nblocks);
/* NEON implementation; caller must be inside a kernel-mode SIMD section */
asmlinkage void sha256_block_neon(struct sha256_block_state *state,
				  const u8 *data, size_t nblocks);
/*
 * Crypto Extensions implementation.  May stop early; returns the number of
 * blocks that remain unprocessed, so the caller loops until it returns 0.
 */
asmlinkage size_t __sha256_ce_transform(struct sha256_block_state *state,
					const u8 *data, size_t nblocks);
19 
20 static void sha256_blocks(struct sha256_block_state *state,
21 			  const u8 *data, size_t nblocks)
22 {
23 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
24 	    static_branch_likely(&have_neon) && likely(may_use_simd())) {
25 		if (static_branch_likely(&have_ce)) {
26 			do {
27 				size_t rem;
28 
29 				scoped_ksimd()
30 					rem = __sha256_ce_transform(state, data,
31 								    nblocks);
32 
33 				data += (nblocks - rem) * SHA256_BLOCK_SIZE;
34 				nblocks = rem;
35 			} while (nblocks);
36 		} else {
37 			scoped_ksimd()
38 				sha256_block_neon(state, data, nblocks);
39 		}
40 	} else {
41 		sha256_block_data_order(state, data, nblocks);
42 	}
43 }
44 
/*
 * Pin the struct layout the finup2x assembly depends on — presumably it
 * accesses these fields by hard-coded offset; keep in sync with the asm.
 */
static_assert(offsetof(struct __sha256_ctx, state) == 0);
static_assert(offsetof(struct __sha256_ctx, bytecount) == 32);
static_assert(offsetof(struct __sha256_ctx, buf) == 40);
/* Interleaved finup of two equal-length messages sharing one starting ctx */
asmlinkage void sha256_ce_finup2x(const struct __sha256_ctx *ctx,
				  const u8 *data1, const u8 *data2, int len,
				  u8 out1[SHA256_DIGEST_SIZE],
				  u8 out2[SHA256_DIGEST_SIZE]);
52 
53 #define sha256_finup_2x_arch sha256_finup_2x_arch
54 static bool sha256_finup_2x_arch(const struct __sha256_ctx *ctx,
55 				 const u8 *data1, const u8 *data2, size_t len,
56 				 u8 out1[SHA256_DIGEST_SIZE],
57 				 u8 out2[SHA256_DIGEST_SIZE])
58 {
59 	/*
60 	 * The assembly requires len >= SHA256_BLOCK_SIZE && len <= INT_MAX.
61 	 * Further limit len to 65536 to avoid spending too long with preemption
62 	 * disabled.  (Of course, in practice len is nearly always 4096 anyway.)
63 	 */
64 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
65 	    static_branch_likely(&have_ce) && len >= SHA256_BLOCK_SIZE &&
66 	    len <= 65536 && likely(may_use_simd())) {
67 		scoped_ksimd()
68 			sha256_ce_finup2x(ctx, data1, data2, len, out1, out2);
69 		kmsan_unpoison_memory(out1, SHA256_DIGEST_SIZE);
70 		kmsan_unpoison_memory(out2, SHA256_DIGEST_SIZE);
71 		return true;
72 	}
73 	return false;
74 }
75 
/* True when the interleaved two-message finup path is actually accelerated */
static bool sha256_finup_2x_is_optimized_arch(void)
{
	return static_key_enabled(&have_ce);
}
80 
#ifdef CONFIG_KERNEL_MODE_NEON
#define sha256_mod_init_arch sha256_mod_init_arch
/*
 * Probe CPU features once at module init and flip the static keys that
 * select the NEON and Crypto Extensions code paths.
 */
static void sha256_mod_init_arch(void)
{
	if (!cpu_have_named_feature(ASIMD))
		return;
	static_branch_enable(&have_neon);
	/* The SHA2 Crypto Extensions imply NEON, so only check them here. */
	if (cpu_have_named_feature(SHA2))
		static_branch_enable(&have_ce);
}
#endif /* CONFIG_KERNEL_MODE_NEON */
92