1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * SHA-224, SHA-256, HMAC-SHA224, and HMAC-SHA256 library functions
4 *
5 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
6 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2014 Red Hat Inc.
9 * Copyright 2025 Google LLC
10 */
11
12 #include <crypto/hmac.h>
13 #include <crypto/sha2.h>
14 #include <linux/export.h>
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/string.h>
18 #include <linux/unaligned.h>
19 #include <linux/wordpart.h>
20 #include "fips.h"
21
/* Initial hash state (IV) for SHA-224, per FIPS 180-4 section 5.3.2 */
static const struct sha256_block_state sha224_iv = {
	.h = {
		SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
		SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
	},
};
28
/*
 * A freshly-initialized SHA-256 context: the SHA-256 IV and a zero byte
 * count.  Used both as a copy source and (via the sha256_iv alias below)
 * as the SHA-256 initial hash state.
 */
static const struct sha256_ctx initial_sha256_ctx = {
	.ctx = {
		.state = {
			.h = {
				SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
				SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
			},
		},
		.bytecount = 0,
	},
};

/* Initial hash state (IV) for SHA-256, aliased into initial_sha256_ctx */
#define sha256_iv (initial_sha256_ctx.ctx.state)
42
/* SHA-256 round constants K[0..63], per FIPS 180-4 section 4.2.2 */
static const u32 sha256_K[64] = {
	0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1,
	0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
	0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786,
	0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
	0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147,
	0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
	0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b,
	0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
	0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a,
	0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
	0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
};
56
/*
 * SHA-256 logical functions per FIPS 180-4 section 4.1.2:
 * Ch = "choose" (x selects between y and z), Maj = bitwise majority,
 * e0/e1 = big Sigma0/Sigma1 (used in the rounds),
 * s0/s1 = small sigma0/sigma1 (used in message-schedule expansion).
 */
#define Ch(x, y, z) ((z) ^ ((x) & ((y) ^ (z))))
#define Maj(x, y, z) (((x) & (y)) | ((z) & ((x) | (y))))
#define e0(x) (ror32((x), 2) ^ ror32((x), 13) ^ ror32((x), 22))
#define e1(x) (ror32((x), 6) ^ ror32((x), 11) ^ ror32((x), 25))
#define s0(x) (ror32((x), 7) ^ ror32((x), 18) ^ ((x) >> 3))
#define s1(x) (ror32((x), 17) ^ ror32((x), 19) ^ ((x) >> 10))
63
/* Load big-endian message word I of the input block into the schedule. */
static inline void LOAD_OP(int I, u32 *W, const u8 *input)
{
	W[I] = get_unaligned_be32(input + 4 * I);
}
68
/* Expand the message schedule: derive W[I] from four earlier words. */
static inline void BLEND_OP(int I, u32 *W)
{
	W[I] = W[I - 16] + s0(W[I - 15]) + W[I - 7] + s1(W[I - 2]);
}
73
/*
 * One SHA-256 round: consume schedule word W[i] and constant K[i], and
 * update working variables d and h in place.  The caller rotates the
 * variable names between invocations instead of shifting values around.
 */
#define SHA256_ROUND(i, a, b, c, d, e, f, g, h) \
	do { \
		u32 t1, t2; \
		t1 = h + e1(e) + Ch(e, f, g) + sha256_K[i] + W[i]; \
		t2 = e0(a) + Maj(a, b, c); \
		d += t1; \
		h = t1 + t2; \
	} while (0)
82
/*
 * Compress one 64-byte input block into @state using the generic (portable)
 * SHA-256 compression function.  @W is caller-provided scratch space for the
 * 64-word message schedule, so the caller can zeroize it just once after
 * hashing all blocks.
 */
static void sha256_block_generic(struct sha256_block_state *state,
				 const u8 *input, u32 W[64])
{
	u32 a, b, c, d, e, f, g, h;
	int i;

	/* load the input (manually unrolled by 8) */
	for (i = 0; i < 16; i += 8) {
		LOAD_OP(i + 0, W, input);
		LOAD_OP(i + 1, W, input);
		LOAD_OP(i + 2, W, input);
		LOAD_OP(i + 3, W, input);
		LOAD_OP(i + 4, W, input);
		LOAD_OP(i + 5, W, input);
		LOAD_OP(i + 6, W, input);
		LOAD_OP(i + 7, W, input);
	}

	/* now blend: expand words 16..63 of the message schedule */
	for (i = 16; i < 64; i += 8) {
		BLEND_OP(i + 0, W);
		BLEND_OP(i + 1, W);
		BLEND_OP(i + 2, W);
		BLEND_OP(i + 3, W);
		BLEND_OP(i + 4, W);
		BLEND_OP(i + 5, W);
		BLEND_OP(i + 6, W);
		BLEND_OP(i + 7, W);
	}

	/* load the state into our registers */
	a = state->h[0];
	b = state->h[1];
	c = state->h[2];
	d = state->h[3];
	e = state->h[4];
	f = state->h[5];
	g = state->h[6];
	h = state->h[7];

	/*
	 * now iterate: 64 rounds, 8 per loop iteration.  The rotated argument
	 * lists rename the working variables so no value shuffling is needed.
	 */
	for (i = 0; i < 64; i += 8) {
		SHA256_ROUND(i + 0, a, b, c, d, e, f, g, h);
		SHA256_ROUND(i + 1, h, a, b, c, d, e, f, g);
		SHA256_ROUND(i + 2, g, h, a, b, c, d, e, f);
		SHA256_ROUND(i + 3, f, g, h, a, b, c, d, e);
		SHA256_ROUND(i + 4, e, f, g, h, a, b, c, d);
		SHA256_ROUND(i + 5, d, e, f, g, h, a, b, c);
		SHA256_ROUND(i + 6, c, d, e, f, g, h, a, b);
		SHA256_ROUND(i + 7, b, c, d, e, f, g, h, a);
	}

	/* add the working variables back into the chaining state (FIPS 180) */
	state->h[0] += a;
	state->h[1] += b;
	state->h[2] += c;
	state->h[3] += d;
	state->h[4] += e;
	state->h[5] += f;
	state->h[6] += g;
	state->h[7] += h;
}
144
145 static void __maybe_unused
sha256_blocks_generic(struct sha256_block_state * state,const u8 * data,size_t nblocks)146 sha256_blocks_generic(struct sha256_block_state *state,
147 const u8 *data, size_t nblocks)
148 {
149 u32 W[64];
150
151 do {
152 sha256_block_generic(state, data, W);
153 data += SHA256_BLOCK_SIZE;
154 } while (--nblocks);
155
156 memzero_explicit(W, sizeof(W));
157 }
158
159 #if defined(CONFIG_CRYPTO_LIB_SHA256_ARCH) && !defined(__DISABLE_EXPORTS)
160 #include "sha256.h" /* $(SRCARCH)/sha256.h */
161 #else
162 #define sha256_blocks sha256_blocks_generic
163 #endif
164
/*
 * Initialize @ctx with the given initial hash state @iv and a starting
 * byte count (nonzero for HMAC, where one key block is already absorbed).
 */
static void __sha256_init(struct __sha256_ctx *ctx,
			  const struct sha256_block_state *iv,
			  u64 initial_bytecount)
{
	ctx->bytecount = initial_bytecount;
	ctx->state = *iv;
}
172
/* Begin a new SHA-224 message digest computation. */
void sha224_init(struct sha224_ctx *ctx)
{
	__sha256_init(&ctx->ctx, &sha224_iv, 0);
}
EXPORT_SYMBOL_GPL(sha224_init);
178
/* Begin a new SHA-256 message digest computation. */
void sha256_init(struct sha256_ctx *ctx)
{
	__sha256_init(&ctx->ctx, &sha256_iv, 0);
}
EXPORT_SYMBOL_GPL(sha256_init);
184
/*
 * Feed @len bytes of @data into the hash state.  Partial blocks are
 * buffered in ctx->buf; whole blocks in @data are hashed directly
 * without copying.
 */
void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len)
{
	size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;

	ctx->bytecount += len;

	if (partial + len >= SHA256_BLOCK_SIZE) {
		size_t nblocks;

		if (partial) {
			size_t l = SHA256_BLOCK_SIZE - partial;

			/* Complete the buffered partial block and hash it. */
			memcpy(&ctx->buf[partial], data, l);
			data += l;
			len -= l;

			sha256_blocks(&ctx->state, ctx->buf, 1);
		}

		nblocks = len / SHA256_BLOCK_SIZE;
		len %= SHA256_BLOCK_SIZE;

		if (nblocks) {
			/* Hash whole blocks straight from @data. */
			sha256_blocks(&ctx->state, data, nblocks);
			data += nblocks * SHA256_BLOCK_SIZE;
		}
		partial = 0;
	}
	/* Stash any trailing partial block for the next call. */
	if (len)
		memcpy(&ctx->buf[partial], data, len);
}
EXPORT_SYMBOL(__sha256_update);
217
/*
 * Finish the hash: append the 0x80 terminator and the 64-bit big-endian
 * bit count, compress the final block(s), then write the first
 * @digest_size bytes of the state to @out (28 for SHA-224, 32 for
 * SHA-256).
 */
static void __sha256_final(struct __sha256_ctx *ctx,
			   u8 *out, size_t digest_size)
{
	u64 bitcount = ctx->bytecount << 3;
	size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE;

	ctx->buf[partial++] = 0x80;
	/* No room left for the 8-byte length field: pad and hash an extra block. */
	if (partial > SHA256_BLOCK_SIZE - 8) {
		memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - partial);
		sha256_blocks(&ctx->state, ctx->buf, 1);
		partial = 0;
	}
	memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - 8 - partial);
	*(__be64 *)&ctx->buf[SHA256_BLOCK_SIZE - 8] = cpu_to_be64(bitcount);
	sha256_blocks(&ctx->state, ctx->buf, 1);

	/* Serialize the state words big-endian to form the digest. */
	for (size_t i = 0; i < digest_size; i += 4)
		put_unaligned_be32(ctx->state.h[i / 4], out + i);
}
237
/* Finish SHA-224, write the 28-byte digest to @out, and wipe the context. */
void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE])
{
	__sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE);
	memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha224_final);
244
/* Finish SHA-256, write the 32-byte digest to @out, and wipe the context. */
void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE])
{
	__sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE);
	memzero_explicit(ctx, sizeof(*ctx));
}
EXPORT_SYMBOL(sha256_final);
251
/* One-shot SHA-224: digest @len bytes of @data into the 28-byte @out. */
void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE])
{
	struct sha224_ctx hash;

	sha224_init(&hash);
	sha224_update(&hash, data, len);
	sha224_final(&hash, out);
}
EXPORT_SYMBOL(sha224);
261
/* One-shot SHA-256: digest @len bytes of @data into the 32-byte @out. */
void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE])
{
	struct sha256_ctx hash;

	sha256_init(&hash);
	sha256_update(&hash, data, len);
	sha256_final(&hash, out);
}
EXPORT_SYMBOL(sha256);
271
272 /*
273 * Pre-boot environments (as indicated by __DISABLE_EXPORTS being defined) just
274 * need the generic SHA-256 code. Omit all other features from them.
275 */
276 #ifndef __DISABLE_EXPORTS
277
#ifndef sha256_finup_2x_arch
/*
 * Fallback stub used when the architecture provides no interleaved 2x
 * finup: report "not handled" so the caller takes the sequential path.
 */
static bool sha256_finup_2x_arch(const struct __sha256_ctx *ctx,
				 const u8 *data1, const u8 *data2, size_t len,
				 u8 out1[SHA256_DIGEST_SIZE],
				 u8 out2[SHA256_DIGEST_SIZE])
{
	return false;
}
/* With no arch implementation, the 2x path is never optimized. */
static bool sha256_finup_2x_is_optimized_arch(void)
{
	return false;
}
#endif
291
292 /* Sequential fallback implementation of sha256_finup_2x() */
sha256_finup_2x_sequential(const struct __sha256_ctx * ctx,const u8 * data1,const u8 * data2,size_t len,u8 out1[SHA256_DIGEST_SIZE],u8 out2[SHA256_DIGEST_SIZE])293 static noinline_for_stack void sha256_finup_2x_sequential(
294 const struct __sha256_ctx *ctx, const u8 *data1, const u8 *data2,
295 size_t len, u8 out1[SHA256_DIGEST_SIZE], u8 out2[SHA256_DIGEST_SIZE])
296 {
297 struct __sha256_ctx mut_ctx;
298
299 mut_ctx = *ctx;
300 __sha256_update(&mut_ctx, data1, len);
301 __sha256_final(&mut_ctx, out1, SHA256_DIGEST_SIZE);
302
303 mut_ctx = *ctx;
304 __sha256_update(&mut_ctx, data2, len);
305 __sha256_final(&mut_ctx, out2, SHA256_DIGEST_SIZE);
306 }
307
/*
 * Finish two equal-length SHA-256 messages that share the common prefix
 * already absorbed into @ctx.  A NULL @ctx means "start from scratch".
 * Uses the interleaved arch implementation when it accepts the request,
 * otherwise falls back to hashing the two messages sequentially.
 */
void sha256_finup_2x(const struct sha256_ctx *ctx, const u8 *data1,
		     const u8 *data2, size_t len, u8 out1[SHA256_DIGEST_SIZE],
		     u8 out2[SHA256_DIGEST_SIZE])
{
	const struct __sha256_ctx *base;

	base = ctx ? &ctx->ctx : &initial_sha256_ctx.ctx;

	if (unlikely(!sha256_finup_2x_arch(base, data1, data2, len,
					   out1, out2)))
		sha256_finup_2x_sequential(base, data1, data2, len,
					   out1, out2);
}
EXPORT_SYMBOL_GPL(sha256_finup_2x);
321
/* Report whether sha256_finup_2x() has an optimized arch implementation. */
bool sha256_finup_2x_is_optimized(void)
{
	return sha256_finup_2x_is_optimized_arch();
}
EXPORT_SYMBOL_GPL(sha256_finup_2x_is_optimized);
327
/*
 * Derive the HMAC inner and outer hash states from @raw_key per RFC 2104:
 * hash the key down to digest size if it exceeds the block size, then
 * absorb (key XOR ipad) into @istate and (key XOR opad) into @ostate.
 * @iv selects SHA-224 vs SHA-256 semantics.
 */
static void __hmac_sha256_preparekey(struct sha256_block_state *istate,
				     struct sha256_block_state *ostate,
				     const u8 *raw_key, size_t raw_key_len,
				     const struct sha256_block_state *iv)
{
	union {
		u8 b[SHA256_BLOCK_SIZE];
		unsigned long w[SHA256_BLOCK_SIZE / sizeof(unsigned long)];
	} derived_key = { 0 };

	if (unlikely(raw_key_len > SHA256_BLOCK_SIZE)) {
		/* Overlong keys are replaced by their digest. */
		if (iv == &sha224_iv)
			sha224(raw_key, raw_key_len, derived_key.b);
		else
			sha256(raw_key, raw_key_len, derived_key.b);
	} else {
		memcpy(derived_key.b, raw_key, raw_key_len);
	}

	for (size_t i = 0; i < ARRAY_SIZE(derived_key.w); i++)
		derived_key.w[i] ^= REPEAT_BYTE(HMAC_IPAD_VALUE);
	*istate = *iv;
	sha256_blocks(istate, derived_key.b, 1);

	/* XOR with (opad ^ ipad) turns the ipad-masked key into opad-masked. */
	for (size_t i = 0; i < ARRAY_SIZE(derived_key.w); i++)
		derived_key.w[i] ^= REPEAT_BYTE(HMAC_OPAD_VALUE ^
						HMAC_IPAD_VALUE);
	*ostate = *iv;
	sha256_blocks(ostate, derived_key.b, 1);

	/* Key material is sensitive; wipe it. */
	memzero_explicit(&derived_key, sizeof(derived_key));
}
360
/* Prepare an HMAC-SHA224 key from @raw_key (any length). */
void hmac_sha224_preparekey(struct hmac_sha224_key *key,
			    const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&key->key.istate, &key->key.ostate,
				 raw_key, raw_key_len, &sha224_iv);
}
EXPORT_SYMBOL_GPL(hmac_sha224_preparekey);
368
/* Prepare an HMAC-SHA256 key from @raw_key (any length). */
void hmac_sha256_preparekey(struct hmac_sha256_key *key,
			    const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&key->key.istate, &key->key.ostate,
				 raw_key, raw_key_len, &sha256_iv);
}
EXPORT_SYMBOL_GPL(hmac_sha256_preparekey);
376
__hmac_sha256_init(struct __hmac_sha256_ctx * ctx,const struct __hmac_sha256_key * key)377 void __hmac_sha256_init(struct __hmac_sha256_ctx *ctx,
378 const struct __hmac_sha256_key *key)
379 {
380 __sha256_init(&ctx->sha_ctx, &key->istate, SHA256_BLOCK_SIZE);
381 ctx->ostate = key->ostate;
382 }
383 EXPORT_SYMBOL_GPL(__hmac_sha256_init);
384
/* Key and start an HMAC-SHA224 context directly from @raw_key. */
void hmac_sha224_init_usingrawkey(struct hmac_sha224_ctx *ctx,
				  const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&ctx->ctx.sha_ctx.state, &ctx->ctx.ostate,
				 raw_key, raw_key_len, &sha224_iv);
	/* The inner hash has already absorbed one key block. */
	ctx->ctx.sha_ctx.bytecount = SHA256_BLOCK_SIZE;
}
EXPORT_SYMBOL_GPL(hmac_sha224_init_usingrawkey);
393
/* Key and start an HMAC-SHA256 context directly from @raw_key. */
void hmac_sha256_init_usingrawkey(struct hmac_sha256_ctx *ctx,
				  const u8 *raw_key, size_t raw_key_len)
{
	__hmac_sha256_preparekey(&ctx->ctx.sha_ctx.state, &ctx->ctx.ostate,
				 raw_key, raw_key_len, &sha256_iv);
	/* The inner hash has already absorbed one key block. */
	ctx->ctx.sha_ctx.bytecount = SHA256_BLOCK_SIZE;
}
EXPORT_SYMBOL_GPL(hmac_sha256_init_usingrawkey);
402
/*
 * Finish an HMAC computation: complete the inner hash, hand-build its
 * digest plus SHA-256 padding as a single block, and feed that to the
 * saved outer-hash state.  The context is wiped since it holds
 * key-derived material.
 */
static void __hmac_sha256_final(struct __hmac_sha256_ctx *ctx,
				u8 *out, size_t digest_size)
{
	/* Generate the padded input for the outer hash in ctx->sha_ctx.buf. */
	__sha256_final(&ctx->sha_ctx, ctx->sha_ctx.buf, digest_size);
	memset(&ctx->sha_ctx.buf[digest_size], 0,
	       SHA256_BLOCK_SIZE - digest_size);
	ctx->sha_ctx.buf[digest_size] = 0x80;
	/* Outer message length in bits: one key block plus the inner digest. */
	*(__be32 *)&ctx->sha_ctx.buf[SHA256_BLOCK_SIZE - 4] =
		cpu_to_be32(8 * (SHA256_BLOCK_SIZE + digest_size));

	/* Compute the outer hash, which gives the HMAC value. */
	sha256_blocks(&ctx->ostate, ctx->sha_ctx.buf, 1);
	for (size_t i = 0; i < digest_size; i += 4)
		put_unaligned_be32(ctx->ostate.h[i / 4], out + i);

	memzero_explicit(ctx, sizeof(*ctx));
}
421
/* Finish HMAC-SHA224 and write the 28-byte MAC to @out; wipes @ctx. */
void hmac_sha224_final(struct hmac_sha224_ctx *ctx,
		       u8 out[SHA224_DIGEST_SIZE])
{
	__hmac_sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE);
}
EXPORT_SYMBOL_GPL(hmac_sha224_final);
428
/* Finish HMAC-SHA256 and write the 32-byte MAC to @out; wipes @ctx. */
void hmac_sha256_final(struct hmac_sha256_ctx *ctx,
		       u8 out[SHA256_DIGEST_SIZE])
{
	__hmac_sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE);
}
EXPORT_SYMBOL_GPL(hmac_sha256_final);
435
/* One-shot HMAC-SHA224 of @data_len bytes of @data with a prepared @key. */
void hmac_sha224(const struct hmac_sha224_key *key,
		 const u8 *data, size_t data_len, u8 out[SHA224_DIGEST_SIZE])
{
	struct hmac_sha224_ctx hctx;

	hmac_sha224_init(&hctx, key);
	hmac_sha224_update(&hctx, data, data_len);
	hmac_sha224_final(&hctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha224);
446
/* One-shot HMAC-SHA256 of @data_len bytes of @data with a prepared @key. */
void hmac_sha256(const struct hmac_sha256_key *key,
		 const u8 *data, size_t data_len, u8 out[SHA256_DIGEST_SIZE])
{
	struct hmac_sha256_ctx hctx;

	hmac_sha256_init(&hctx, key);
	hmac_sha256_update(&hctx, data, data_len);
	hmac_sha256_final(&hctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha256);
457
/* One-shot HMAC-SHA224 keyed directly from @raw_key (any length). */
void hmac_sha224_usingrawkey(const u8 *raw_key, size_t raw_key_len,
			     const u8 *data, size_t data_len,
			     u8 out[SHA224_DIGEST_SIZE])
{
	struct hmac_sha224_ctx hctx;

	hmac_sha224_init_usingrawkey(&hctx, raw_key, raw_key_len);
	hmac_sha224_update(&hctx, data, data_len);
	hmac_sha224_final(&hctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha224_usingrawkey);
469
/* One-shot HMAC-SHA256 keyed directly from @raw_key (any length). */
void hmac_sha256_usingrawkey(const u8 *raw_key, size_t raw_key_len,
			     const u8 *data, size_t data_len,
			     u8 out[SHA256_DIGEST_SIZE])
{
	struct hmac_sha256_ctx hctx;

	hmac_sha256_init_usingrawkey(&hctx, raw_key, raw_key_len);
	hmac_sha256_update(&hctx, data, data_len);
	hmac_sha256_final(&hctx, out);
}
EXPORT_SYMBOL_GPL(hmac_sha256_usingrawkey);
481
#if defined(sha256_mod_init_arch) || defined(CONFIG_CRYPTO_FIPS)
/*
 * Module init: run the arch-specific setup hook (if one exists) and, when
 * FIPS mode is enabled, perform the mandatory power-up self-test.
 */
static int __init sha256_mod_init(void)
{
#ifdef sha256_mod_init_arch
	sha256_mod_init_arch();
#endif
	if (fips_enabled) {
		/*
		 * FIPS cryptographic algorithm self-test.  As per the FIPS
		 * Implementation Guidance, testing HMAC-SHA256 satisfies the
		 * test requirement for SHA-224, SHA-256, and HMAC-SHA224 too.
		 */
		u8 mac[SHA256_DIGEST_SIZE];

		hmac_sha256_usingrawkey(fips_test_key, sizeof(fips_test_key),
					fips_test_data, sizeof(fips_test_data),
					mac);
		/* A mismatch means the implementation is broken; halt per FIPS. */
		if (memcmp(fips_test_hmac_sha256_value, mac, sizeof(mac)) != 0)
			panic("sha256: FIPS self-test failed\n");
	}
	return 0;
}
subsys_initcall(sha256_mod_init);

/* Empty exit handler; its presence allows the module to be unloaded. */
static void __exit sha256_mod_exit(void)
{
}
module_exit(sha256_mod_exit);
#endif
511
512 #endif /* !__DISABLE_EXPORTS */
513
514 MODULE_DESCRIPTION("SHA-224, SHA-256, HMAC-SHA224, and HMAC-SHA256 library functions");
515 MODULE_LICENSE("GPL");
516