// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright 2025 Google LLC
 */
#include <crypto/sha2.h>
#include "sha256-testvecs.h"

/* Generate the HASH_KUNIT_CASES using hash-test-template.h. */
#define HASH sha256
#define HASH_CTX sha256_ctx
#define HASH_SIZE SHA256_DIGEST_SIZE
#define HASH_INIT sha256_init
#define HASH_UPDATE sha256_update
#define HASH_FINAL sha256_final
#define HMAC_KEY hmac_sha256_key
#define HMAC_CTX hmac_sha256_ctx
#define HMAC_PREPAREKEY hmac_sha256_preparekey
#define HMAC_INIT hmac_sha256_init
#define HMAC_UPDATE hmac_sha256_update
#define HMAC_FINAL hmac_sha256_final
#define HMAC hmac_sha256
#define HMAC_USINGRAWKEY hmac_sha256_usingrawkey
#include "hash-test-template.h"
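/*
 * Note: hash-test-template.h also provides the shared helpers used below,
 * including test_buf, rand_bytes(), rand_length(), benchmark_hash(), and the
 * hash_suite_init()/hash_suite_exit() callbacks.
 */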

/* KUnit deferred-cleanup action: frees a buffer from alloc_guarded_buf(). */
static void free_guarded_buf(void *buf)
{
	vfree(buf);
}

/*
 * Allocate a KUnit-managed buffer that has length @len bytes immediately
 * followed by an unmapped page, and assert that the allocation succeeds.
 */
static void *alloc_guarded_buf(struct kunit *test, size_t len)
{
	size_t full_len = round_up(len, PAGE_SIZE);
	void *buf = vmalloc(full_len);

	KUNIT_ASSERT_NOT_NULL(test, buf);
	KUNIT_ASSERT_EQ(test, 0,
			kunit_add_action_or_reset(test, free_guarded_buf, buf));
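	/*
	 * vmalloc() areas are followed by a guard page, so place the buffer at
	 * the very end of the allocation: any access past @len bytes faults.
	 */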
	return buf + full_len - len;
}

/*
 * Test for sha256_finup_2x().  Specifically, choose various data lengths and
 * salt lengths, and for each one, verify that sha256_finup_2x() produces the
 * same results as sha256_update() and sha256_final().
 *
 * Use guarded buffers for all inputs and outputs to reliably detect any
 * out-of-bounds reads or writes, even if they occur in assembly code.
 */
static void test_sha256_finup_2x(struct kunit *test)
{
	const size_t max_data_len = 16384;
	u8 *data1_buf, *data2_buf, *hash1, *hash2;
	u8 expected_hash1[SHA256_DIGEST_SIZE];
	u8 expected_hash2[SHA256_DIGEST_SIZE];
	u8 salt[SHA256_BLOCK_SIZE];
	struct sha256_ctx *ctx;

	data1_buf = alloc_guarded_buf(test, max_data_len);
	data2_buf = alloc_guarded_buf(test, max_data_len);
	hash1 = alloc_guarded_buf(test, SHA256_DIGEST_SIZE);
	hash2 = alloc_guarded_buf(test, SHA256_DIGEST_SIZE);
	ctx = alloc_guarded_buf(test, sizeof(*ctx));

	rand_bytes(data1_buf, max_data_len);
	rand_bytes(data2_buf, max_data_len);
	rand_bytes(salt, sizeof(salt));

	for (size_t i = 0; i < 500; i++) {
		size_t salt_len = rand_length(sizeof(salt));
		size_t data_len = rand_length(max_data_len);
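		/*
		 * Point at the tail of each guarded buffer so that the data
		 * ends right at the guard page; overreads fault immediately.
		 */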
		const u8 *data1 = data1_buf + max_data_len - data_len;
		const u8 *data2 = data2_buf + max_data_len - data_len;
		struct sha256_ctx orig_ctx;

		sha256_init(ctx);
		sha256_update(ctx, salt, salt_len);
		orig_ctx = *ctx;

		sha256_finup_2x(ctx, data1, data2, data_len, hash1, hash2);
		KUNIT_ASSERT_MEMEQ_MSG(
			test, ctx, &orig_ctx, sizeof(*ctx),
			"sha256_finup_2x() modified its ctx argument");

		sha256_update(ctx, data1, data_len);
		sha256_final(ctx, expected_hash1);
		sha256_update(&orig_ctx, data2, data_len);
		sha256_final(&orig_ctx, expected_hash2);
		KUNIT_ASSERT_MEMEQ_MSG(
			test, hash1, expected_hash1, SHA256_DIGEST_SIZE,
			"Wrong hash1 with salt_len=%zu data_len=%zu", salt_len,
			data_len);
		KUNIT_ASSERT_MEMEQ_MSG(
			test, hash2, expected_hash2, SHA256_DIGEST_SIZE,
			"Wrong hash2 with salt_len=%zu data_len=%zu", salt_len,
			data_len);
	}
}

/* Test sha256_finup_2x() with ctx == NULL */
static void test_sha256_finup_2x_defaultctx(struct kunit *test)
{
	const size_t data_len = 128;
	struct sha256_ctx ctx;
	u8 hash1_a[SHA256_DIGEST_SIZE];
	u8 hash2_a[SHA256_DIGEST_SIZE];
	u8 hash1_b[SHA256_DIGEST_SIZE];
	u8 hash2_b[SHA256_DIGEST_SIZE];

	rand_bytes(test_buf, 2 * data_len);

	sha256_init(&ctx);
	sha256_finup_2x(&ctx, test_buf, &test_buf[data_len], data_len, hash1_a,
			hash2_a);

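	/* A NULL ctx must behave exactly like a freshly initialized context. */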
	sha256_finup_2x(NULL, test_buf, &test_buf[data_len], data_len, hash1_b,
			hash2_b);

	KUNIT_ASSERT_MEMEQ(test, hash1_a, hash1_b, SHA256_DIGEST_SIZE);
	KUNIT_ASSERT_MEMEQ(test, hash2_a, hash2_b, SHA256_DIGEST_SIZE);
}

/*
 * Test that sha256_finup_2x() and sha256_update/final() produce consistent
 * results with total message lengths that require more than 32 bits.
 */
static void test_sha256_finup_2x_hugelen(struct kunit *test)
{
	const size_t data_len = 4 * SHA256_BLOCK_SIZE;
	struct sha256_ctx ctx = {};
	u8 expected_hash[SHA256_DIGEST_SIZE];
	u8 hash[SHA256_DIGEST_SIZE];

	rand_bytes(test_buf, data_len);
	for (size_t align = 0; align < SHA256_BLOCK_SIZE; align++) {
		sha256_init(&ctx);
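		/*
		 * Pretend a huge amount of data was already processed, varying
		 * the offset within a block, so that the finalized bit count
		 * requires more than 32 bits.
		 */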
		ctx.ctx.bytecount = 0x123456789abcd00 + align;

		sha256_finup_2x(&ctx, test_buf, test_buf, data_len, hash, hash);

		sha256_update(&ctx, test_buf, data_len);
		sha256_final(&ctx, expected_hash);

		KUNIT_ASSERT_MEMEQ(test, hash, expected_hash,
				   SHA256_DIGEST_SIZE);
	}
}

/* Benchmark for sha256_finup_2x() */
static void benchmark_sha256_finup_2x(struct kunit *test)
{
	/*
	 * Try a few different salt lengths, since sha256_finup_2x() performance
	 * may vary slightly for the same data_len depending on how many bytes
	 * were already processed in the initial context.
	 */
	static const size_t salt_lens_to_test[] = { 0, 32, 64 };
	const size_t data_len = 4096;
	const size_t num_iters = 4096;
	struct sha256_ctx ctx;
	u8 hash1[SHA256_DIGEST_SIZE];
	u8 hash2[SHA256_DIGEST_SIZE];

	if (!IS_ENABLED(CONFIG_CRYPTO_LIB_BENCHMARK))
		kunit_skip(test, "not enabled");
	if (!sha256_finup_2x_is_optimized())
		kunit_skip(test, "not relevant");

	rand_bytes(test_buf, data_len * 2);

	/* Warm-up */
	for (size_t i = 0; i < num_iters; i++)
		sha256_finup_2x(NULL, &test_buf[0], &test_buf[data_len],
				data_len, hash1, hash2);

	for (size_t i = 0; i < ARRAY_SIZE(salt_lens_to_test); i++) {
		size_t salt_len = salt_lens_to_test[i];
		u64 t0, t1;

		/*
		 * Prepare the initial context.  The time to process the salt is
		 * not measured; we're just interested in sha256_finup_2x().
		 */
		sha256_init(&ctx);
		sha256_update(&ctx, test_buf, salt_len);

		preempt_disable();
		t0 = ktime_get_ns();
		for (size_t j = 0; j < num_iters; j++)
			sha256_finup_2x(&ctx, &test_buf[0], &test_buf[data_len],
					data_len, hash1, hash2);
		t1 = ktime_get_ns();
		preempt_enable();
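		/*
		 * Each call hashes 2 * data_len bytes, so throughput in MB/s is
		 * total bytes * 1000 / elapsed ns; the "?: 1" avoids a division
		 * by zero.
		 */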
		kunit_info(test, "data_len=%zu salt_len=%zu: %llu MB/s",
			   data_len, salt_len,
			   div64_u64((u64)data_len * 2 * num_iters * 1000,
				     t1 - t0 ?: 1));
	}
}

static struct kunit_case hash_test_cases[] = {
	HASH_KUNIT_CASES,
	KUNIT_CASE(test_sha256_finup_2x),
	KUNIT_CASE(test_sha256_finup_2x_defaultctx),
	KUNIT_CASE(test_sha256_finup_2x_hugelen),
	KUNIT_CASE(benchmark_hash),
	KUNIT_CASE(benchmark_sha256_finup_2x),
	{},
};

static struct kunit_suite hash_test_suite = {
	.name = "sha256",
	.test_cases = hash_test_cases,
	.suite_init = hash_suite_init,
	.suite_exit = hash_suite_exit,
};
kunit_test_suite(hash_test_suite);

MODULE_DESCRIPTION("KUnit tests and benchmark for SHA-256 and HMAC-SHA256");
MODULE_LICENSE("GPL");