1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * SHA-224, SHA-256, HMAC-SHA224, and HMAC-SHA256 library functions
4 *
5 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
6 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2014 Red Hat Inc.
9 * Copyright 2025 Google LLC
10 */
11
12 #include <crypto/hmac.h>
13 #include <crypto/sha2.h>
14 #include <linux/export.h>
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/string.h>
18 #include <linux/unaligned.h>
19 #include <linux/wordpart.h>
20
/* SHA-224 initial hash state (FIPS 180-4 section 5.3.2) */
static const struct sha256_block_state sha224_iv = {
	.h = {
		SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
		SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
	},
};
27
/* SHA-256 initial hash state (FIPS 180-4 section 5.3.3) */
static const struct sha256_block_state sha256_iv = {
	.h = {
		SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
		SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
	},
};
34
/*
 * SHA-256 round constants K[0..63]: the first 32 bits of the fractional
 * parts of the cube roots of the first 64 primes (FIPS 180-4 section 4.2.2).
 */
static const u32 sha256_K[64] = {
	0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1,
	0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
	0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786,
	0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
	0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147,
	0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
	0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b,
	0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
	0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a,
	0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
	0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
};
48
/*
 * FIPS 180-4 section 4.1.2 logical functions.
 * Ch ("choose") and Maj ("majority") are written in reduced-operation
 * form rather than the textbook expressions; e0/e1 are the big Sigma
 * functions used in each round, s0/s1 the small sigma functions used in
 * the message-schedule expansion.
 */
#define Ch(x, y, z) ((z) ^ ((x) & ((y) ^ (z))))
#define Maj(x, y, z) (((x) & (y)) | ((z) & ((x) | (y))))
#define e0(x) (ror32((x), 2) ^ ror32((x), 13) ^ ror32((x), 22))
#define e1(x) (ror32((x), 6) ^ ror32((x), 11) ^ ror32((x), 25))
#define s0(x) (ror32((x), 7) ^ ror32((x), 18) ^ ((x) >> 3))
#define s1(x) (ror32((x), 17) ^ ror32((x), 19) ^ ((x) >> 10))
55
/* Load big-endian message word I from the (possibly unaligned) input block. */
static inline void LOAD_OP(int I, u32 *W, const u8 *input)
{
	W[I] = get_unaligned_be32(input + I * sizeof(u32));
}
60
/*
 * Message-schedule expansion (FIPS 180-4 section 6.2.2 step 1):
 * W[I] = sigma1(W[I-2]) + W[I-7] + sigma0(W[I-15]) + W[I-16].
 * u32 addition is commutative mod 2^32, so the accumulation order
 * does not affect the result.
 */
static inline void BLEND_OP(int I, u32 *W)
{
	u32 acc = W[I - 16] + W[I - 7];

	acc += s0(W[I - 15]);
	acc += s1(W[I - 2]);
	W[I] = acc;
}
65
/*
 * One SHA-256 round (FIPS 180-4 section 6.2.2 step 3).  The caller
 * expresses the round-to-round rotation of the working variables
 * a..h by permuting the macro arguments instead of moving values.
 */
#define SHA256_ROUND(i, a, b, c, d, e, f, g, h) \
	do { \
		u32 t1, t2; \
		t1 = h + e1(e) + Ch(e, f, g) + sha256_K[i] + W[i]; \
		t2 = e0(a) + Maj(a, b, c); \
		d += t1; \
		h = t1 + t2; \
	} while (0)
74
/*
 * Apply the SHA-256 compression function (FIPS 180-4 section 6.2.2) to
 * one 64-byte block, updating @state in place.  @W is caller-provided
 * scratch for the 64-word message schedule, so that the caller can wipe
 * it once after processing all blocks instead of per block.
 */
static void sha256_block_generic(struct sha256_block_state *state,
				 const u8 *input, u32 W[64])
{
	u32 a, b, c, d, e, f, g, h;
	int i;

	/* load the input */
	for (i = 0; i < 16; i += 8) {
		LOAD_OP(i + 0, W, input);
		LOAD_OP(i + 1, W, input);
		LOAD_OP(i + 2, W, input);
		LOAD_OP(i + 3, W, input);
		LOAD_OP(i + 4, W, input);
		LOAD_OP(i + 5, W, input);
		LOAD_OP(i + 6, W, input);
		LOAD_OP(i + 7, W, input);
	}

	/* now blend: expand the 16 input words to the full 64-word schedule */
	for (i = 16; i < 64; i += 8) {
		BLEND_OP(i + 0, W);
		BLEND_OP(i + 1, W);
		BLEND_OP(i + 2, W);
		BLEND_OP(i + 3, W);
		BLEND_OP(i + 4, W);
		BLEND_OP(i + 5, W);
		BLEND_OP(i + 6, W);
		BLEND_OP(i + 7, W);
	}

	/* load the state into our registers */
	a = state->h[0];
	b = state->h[1];
	c = state->h[2];
	d = state->h[3];
	e = state->h[4];
	f = state->h[5];
	g = state->h[6];
	h = state->h[7];

	/*
	 * now iterate; the rotation of the working variables between rounds
	 * is encoded by rotating the argument list, unrolled 8 at a time so
	 * each loop iteration returns to the canonical assignment.
	 */
	for (i = 0; i < 64; i += 8) {
		SHA256_ROUND(i + 0, a, b, c, d, e, f, g, h);
		SHA256_ROUND(i + 1, h, a, b, c, d, e, f, g);
		SHA256_ROUND(i + 2, g, h, a, b, c, d, e, f);
		SHA256_ROUND(i + 3, f, g, h, a, b, c, d, e);
		SHA256_ROUND(i + 4, e, f, g, h, a, b, c, d);
		SHA256_ROUND(i + 5, d, e, f, g, h, a, b, c);
		SHA256_ROUND(i + 6, c, d, e, f, g, h, a, b);
		SHA256_ROUND(i + 7, b, c, d, e, f, g, h, a);
	}

	/* add the compressed block back into the chaining state */
	state->h[0] += a;
	state->h[1] += b;
	state->h[2] += c;
	state->h[3] += d;
	state->h[4] += e;
	state->h[5] += f;
	state->h[6] += g;
	state->h[7] += h;
}
136
137 static void __maybe_unused
sha256_blocks_generic(struct sha256_block_state * state,const u8 * data,size_t nblocks)138 sha256_blocks_generic(struct sha256_block_state *state,
139 const u8 *data, size_t nblocks)
140 {
141 u32 W[64];
142
143 do {
144 sha256_block_generic(state, data, W);
145 data += SHA256_BLOCK_SIZE;
146 } while (--nblocks);
147
148 memzero_explicit(W, sizeof(W));
149 }
150
151 #if defined(CONFIG_CRYPTO_LIB_SHA256_ARCH) && !defined(__DISABLE_EXPORTS)
152 #include "sha256.h" /* $(SRCARCH)/sha256.h */
153 #else
154 #define sha256_blocks sha256_blocks_generic
155 #endif
156
/*
 * Common initialization for SHA-224 and SHA-256 contexts: install the
 * requested initial state @iv and the starting byte count (nonzero for
 * HMAC, where the key block has already been absorbed).
 */
static void __sha256_init(struct __sha256_ctx *ctx,
			  const struct sha256_block_state *iv,
			  u64 initial_bytecount)
{
	ctx->bytecount = initial_bytecount;
	ctx->state = *iv;
}
164
/* Begin an incremental SHA-224 computation. */
void sha224_init(struct sha224_ctx *ctx)
{
	__sha256_init(&ctx->ctx, &sha224_iv, 0);
}
EXPORT_SYMBOL_GPL(sha224_init);
170
/* Begin an incremental SHA-256 computation. */
void sha256_init(struct sha256_ctx *ctx)
{
	__sha256_init(&ctx->ctx, &sha256_iv, 0);
}
EXPORT_SYMBOL_GPL(sha256_init);
176
/*
 * Absorb @len bytes of @data into the (SHA-224 or SHA-256) context.
 * Data is buffered in ctx->buf until a full 64-byte block is available;
 * whole blocks are fed to sha256_blocks() directly from @data, and any
 * trailing remainder is stashed for the next call or for finalization.
 */
void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len)
{
	size_t buffered = ctx->bytecount % SHA256_BLOCK_SIZE;

	ctx->bytecount += len;

	if (buffered + len >= SHA256_BLOCK_SIZE) {
		size_t full_blocks;

		/* Top up and flush a previously partial buffer first. */
		if (buffered) {
			size_t fill = SHA256_BLOCK_SIZE - buffered;

			memcpy(&ctx->buf[buffered], data, fill);
			sha256_blocks(&ctx->state, ctx->buf, 1);
			data += fill;
			len -= fill;
		}

		/* Hash all remaining complete blocks straight from @data. */
		full_blocks = len / SHA256_BLOCK_SIZE;
		if (full_blocks) {
			sha256_blocks(&ctx->state, data, full_blocks);
			data += full_blocks * SHA256_BLOCK_SIZE;
		}
		len %= SHA256_BLOCK_SIZE;
		buffered = 0;
	}
	/* Keep any leftover bytes for later. */
	if (len)
		memcpy(&ctx->buf[buffered], data, len);
}
EXPORT_SYMBOL(__sha256_update);
209
/*
 * Finish a SHA-224/SHA-256 computation: append the FIPS 180-4 padding
 * (a 0x80 byte, zeroes, then the 64-bit big-endian message bit length),
 * process the final block(s), and write the first @digest_size bytes of
 * the state to @out in big-endian order.
 */
static void __sha256_final(struct __sha256_ctx *ctx,
			   u8 *out, size_t digest_size)
{
	u64 bitcount = ctx->bytecount << 3;
	size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;

	ctx->buf[partial++] = 0x80;
	/* If the 8-byte length field doesn't fit, an extra block is needed. */
	if (partial > SHA256_BLOCK_SIZE - 8) {
		memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - partial);
		sha256_blocks(&ctx->state, ctx->buf, 1);
		partial = 0;
	}
	memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - 8 - partial);
	*(__be64 *)&ctx->buf[SHA256_BLOCK_SIZE - 8] = cpu_to_be64(bitcount);
	sha256_blocks(&ctx->state, ctx->buf, 1);

	/* Serialize the (truncated for SHA-224) state as the digest. */
	for (size_t i = 0; i < digest_size; i += 4)
		put_unaligned_be32(ctx->state.h[i / 4], out + i);
}
229
/* Finish a SHA-224 computation, write the digest, and wipe the context. */
void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE])
{
	__sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE);
	memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha224_final);
236
/* Finish a SHA-256 computation, write the digest, and wipe the context. */
void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE])
{
	__sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE);
	memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha256_final);
243
/* One-shot SHA-224: hash @len bytes of @data into @out. */
void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE])
{
	struct sha224_ctx hash_ctx;

	sha224_init(&hash_ctx);
	sha224_update(&hash_ctx, data, len);
	sha224_final(&hash_ctx, out);
}
EXPORT_SYMBOL(sha224);
253
/* One-shot SHA-256: hash @len bytes of @data into @out. */
void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE])
{
	struct sha256_ctx hash_ctx;

	sha256_init(&hash_ctx);
	sha256_update(&hash_ctx, data, len);
	sha256_final(&hash_ctx, out);
}
EXPORT_SYMBOL(sha256);
263
264 /* pre-boot environment (as indicated by __DISABLE_EXPORTS) doesn't need HMAC */
265 #ifndef __DISABLE_EXPORTS
/*
 * Derive the precomputed HMAC states (RFC 2104): @istate is the chaining
 * state after absorbing (key XOR ipad) and @ostate after (key XOR opad),
 * both starting from @iv (SHA-224 or SHA-256).  Keys longer than one
 * block are first hashed down with the matching hash function.
 */
static void __hmac_sha256_preparekey(struct sha256_block_state *istate,
				     struct sha256_block_state *ostate,
				     const u8 *raw_key, size_t raw_key_len,
				     const struct sha256_block_state *iv)
{
	union {
		u8 b[SHA256_BLOCK_SIZE];
		unsigned long w[SHA256_BLOCK_SIZE / sizeof(unsigned long)];
	} kblock = { 0 };
	size_t i;

	if (unlikely(raw_key_len > SHA256_BLOCK_SIZE)) {
		if (iv == &sha224_iv)
			sha224(raw_key, raw_key_len, kblock.b);
		else
			sha256(raw_key, raw_key_len, kblock.b);
	} else {
		memcpy(kblock.b, raw_key, raw_key_len);
	}

	/* istate := compress(iv, key XOR ipad) */
	for (i = 0; i < ARRAY_SIZE(kblock.w); i++)
		kblock.w[i] ^= REPEAT_BYTE(HMAC_IPAD_VALUE);
	*istate = *iv;
	sha256_blocks(istate, kblock.b, 1);

	/* Flip the ipad mask into opad, then ostate := compress(iv, key XOR opad). */
	for (i = 0; i < ARRAY_SIZE(kblock.w); i++)
		kblock.w[i] ^= REPEAT_BYTE(HMAC_OPAD_VALUE ^
					   HMAC_IPAD_VALUE);
	*ostate = *iv;
	sha256_blocks(ostate, kblock.b, 1);

	/* The padded key is secret material; wipe it from the stack. */
	memzero_explicit(&kblock, sizeof(kblock));
}
298
/* Precompute the HMAC-SHA224 inner/outer states for @raw_key. */
void hmac_sha224_preparekey(struct hmac_sha224_key *key,
			    const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&key->key.istate, &key->key.ostate,
				 raw_key, raw_key_len, &sha224_iv);
}
EXPORT_SYMBOL_GPL(hmac_sha224_preparekey);
306
/* Precompute the HMAC-SHA256 inner/outer states for @raw_key. */
void hmac_sha256_preparekey(struct hmac_sha256_key *key,
			    const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&key->key.istate, &key->key.ostate,
				 raw_key, raw_key_len, &sha256_iv);
}
EXPORT_SYMBOL_GPL(hmac_sha256_preparekey);
314
__hmac_sha256_init(struct __hmac_sha256_ctx * ctx,const struct __hmac_sha256_key * key)315 void __hmac_sha256_init(struct __hmac_sha256_ctx *ctx,
316 const struct __hmac_sha256_key *key)
317 {
318 __sha256_init(&ctx->sha_ctx, &key->istate, SHA256_BLOCK_SIZE);
319 ctx->ostate = key->ostate;
320 }
321 EXPORT_SYMBOL_GPL(__hmac_sha256_init);
322
/* Begin HMAC-SHA224 directly from a raw key, deriving the states inline. */
void hmac_sha224_init_usingrawkey(struct hmac_sha224_ctx *ctx,
				  const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&ctx->ctx.sha_ctx.state, &ctx->ctx.ostate,
				 raw_key, raw_key_len, &sha224_iv);
	/* The inner state already includes the key block. */
	ctx->ctx.sha_ctx.bytecount = SHA256_BLOCK_SIZE;
}
EXPORT_SYMBOL_GPL(hmac_sha224_init_usingrawkey);
331
/* Begin HMAC-SHA256 directly from a raw key, deriving the states inline. */
void hmac_sha256_init_usingrawkey(struct hmac_sha256_ctx *ctx,
				  const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&ctx->ctx.sha_ctx.state, &ctx->ctx.ostate,
				 raw_key, raw_key_len, &sha256_iv);
	/* The inner state already includes the key block. */
	ctx->ctx.sha_ctx.bytecount = SHA256_BLOCK_SIZE;
}
EXPORT_SYMBOL_GPL(hmac_sha256_init_usingrawkey);
340
/*
 * Finish an HMAC computation (RFC 2104): complete the inner hash, then
 * hash its digest under the precomputed outer state.  The inner
 * context's block buffer is reused to build the outer hash's single
 * padded block.  The context is wiped before returning.
 */
static void __hmac_sha256_final(struct __hmac_sha256_ctx *ctx,
				u8 *out, size_t digest_size)
{
	/* Generate the padded input for the outer hash in ctx->sha_ctx.buf. */
	__sha256_final(&ctx->sha_ctx, ctx->sha_ctx.buf, digest_size);
	memset(&ctx->sha_ctx.buf[digest_size], 0,
	       SHA256_BLOCK_SIZE - digest_size);
	ctx->sha_ctx.buf[digest_size] = 0x80;
	/* Outer message length: one key block plus the inner digest. */
	*(__be32 *)&ctx->sha_ctx.buf[SHA256_BLOCK_SIZE - 4] =
		cpu_to_be32(8 * (SHA256_BLOCK_SIZE + digest_size));

	/* Compute the outer hash, which gives the HMAC value. */
	sha256_blocks(&ctx->ostate, ctx->sha_ctx.buf, 1);
	for (size_t i = 0; i < digest_size; i += 4)
		put_unaligned_be32(ctx->ostate.h[i / 4], out + i);

	memzero_explicit(ctx, sizeof(*ctx));
}
359
/* Finish an HMAC-SHA224 computation and write the MAC to @out. */
void hmac_sha224_final(struct hmac_sha224_ctx *ctx,
		       u8 out[SHA224_DIGEST_SIZE])
{
	__hmac_sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE);
}
EXPORT_SYMBOL_GPL(hmac_sha224_final);
366
/* Finish an HMAC-SHA256 computation and write the MAC to @out. */
void hmac_sha256_final(struct hmac_sha256_ctx *ctx,
		       u8 out[SHA256_DIGEST_SIZE])
{
	__hmac_sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE);
}
EXPORT_SYMBOL_GPL(hmac_sha256_final);
373
/* One-shot HMAC-SHA224 over @data using a prepared @key. */
void hmac_sha224(const struct hmac_sha224_key *key,
		 const u8 *data, size_t data_len, u8 out[SHA224_DIGEST_SIZE])
{
	struct hmac_sha224_ctx mac_ctx;

	hmac_sha224_init(&mac_ctx, key);
	hmac_sha224_update(&mac_ctx, data, data_len);
	hmac_sha224_final(&mac_ctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha224);
384
/* One-shot HMAC-SHA256 over @data using a prepared @key. */
void hmac_sha256(const struct hmac_sha256_key *key,
		 const u8 *data, size_t data_len, u8 out[SHA256_DIGEST_SIZE])
{
	struct hmac_sha256_ctx mac_ctx;

	hmac_sha256_init(&mac_ctx, key);
	hmac_sha256_update(&mac_ctx, data, data_len);
	hmac_sha256_final(&mac_ctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha256);
395
/* One-shot HMAC-SHA224 over @data using an unprepared raw key. */
void hmac_sha224_usingrawkey(const u8 *raw_key, size_t raw_key_len,
			     const u8 *data, size_t data_len,
			     u8 out[SHA224_DIGEST_SIZE])
{
	struct hmac_sha224_ctx mac_ctx;

	hmac_sha224_init_usingrawkey(&mac_ctx, raw_key, raw_key_len);
	hmac_sha224_update(&mac_ctx, data, data_len);
	hmac_sha224_final(&mac_ctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha224_usingrawkey);
407
/* One-shot HMAC-SHA256 over @data using an unprepared raw key. */
void hmac_sha256_usingrawkey(const u8 *raw_key, size_t raw_key_len,
			     const u8 *data, size_t data_len,
			     u8 out[SHA256_DIGEST_SIZE])
{
	struct hmac_sha256_ctx mac_ctx;

	hmac_sha256_init_usingrawkey(&mac_ctx, raw_key, raw_key_len);
	hmac_sha256_update(&mac_ctx, data, data_len);
	hmac_sha256_final(&mac_ctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha256_usingrawkey);
419 #endif /* !__DISABLE_EXPORTS */
420
421 #ifdef sha256_mod_init_arch
/* Run the architecture-specific setup (e.g. CPU feature detection). */
static int __init sha256_mod_init(void)
{
	sha256_mod_init_arch();
	return 0;
}
subsys_initcall(sha256_mod_init);
428
/* Empty exit hook; present so the module is unloadable. */
static void __exit sha256_mod_exit(void)
{
}
module_exit(sha256_mod_exit);
433 #endif
434
435 MODULE_DESCRIPTION("SHA-224, SHA-256, HMAC-SHA224, and HMAC-SHA256 library functions");
436 MODULE_LICENSE("GPL");
437