/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef _CRYPTO_INTERNAL_SHA2_H
#define _CRYPTO_INTERNAL_SHA2_H

#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <linux/compiler_attributes.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/unaligned.h>

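/*
 * sha256_is_arch_optimized() reports whether an architecture-optimized
 * SHA-256 block function is available; it resolves to a constant-false
 * stub when no arch implementation is configured.
 */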
#if IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256)
bool sha256_is_arch_optimized(void);
#else
static inline bool sha256_is_arch_optimized(void)
{
	return false;
}
#endif
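/*
 * Low-level SHA-256 compression: each variant consumes @nblocks full
 * 64-byte blocks from @data and updates the eight-word @state in place.
 * _generic is the portable C version; _arch and _simd are supplied by
 * architecture code when the corresponding Kconfig options are enabled.
 */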
void sha256_blocks_generic(u32 state[SHA256_STATE_WORDS],
			   const u8 *data, size_t nblocks);
void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
			const u8 *data, size_t nblocks);
void sha256_blocks_simd(u32 state[SHA256_STATE_WORDS],
			const u8 *data, size_t nblocks);

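/*
 * Dispatch to the best available block function: generic when forced or
 * when no arch implementation exists, SIMD when the vector unit is usable
 * in the current context (or forced), the plain arch version otherwise.
 */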
static __always_inline void sha256_choose_blocks(
	u32 state[SHA256_STATE_WORDS], const u8 *data, size_t nblocks,
	bool force_generic, bool force_simd)
{
	if (!IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256) || force_generic)
		sha256_blocks_generic(state, data, nblocks);
	else if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256_SIMD) &&
		 (force_simd || crypto_simd_usable()))
		sha256_blocks_simd(state, data, nblocks);
	else
		sha256_blocks_arch(state, data, nblocks);
}

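/*
 * Finalize a SHA-256 (or truncated SHA-224) computation: pad the final
 * block per FIPS 180-4, run the remaining one or two compression calls,
 * and write the first @digest_size bytes of the state to @out.
 */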
static __always_inline void sha256_finup(
	struct crypto_sha256_state *sctx, u8 buf[SHA256_BLOCK_SIZE],
	size_t len, u8 out[SHA256_DIGEST_SIZE], size_t digest_size,
	bool force_generic, bool force_simd)
{
	const size_t bit_offset = SHA256_BLOCK_SIZE - 8;
	__be64 *bits = (__be64 *)&buf[bit_offset];
	int i;

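	/*
	 * Append the mandatory 0x80 terminator.  If it leaves no room for
	 * the 64-bit length field, zero-fill and compress this block, then
	 * continue the padding in a fresh block.
	 */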
	buf[len++] = 0x80;
	if (len > bit_offset) {
		memset(&buf[len], 0, SHA256_BLOCK_SIZE - len);
		sha256_choose_blocks(sctx->state, buf, 1, force_generic,
				     force_simd);
		len = 0;
	}

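	/* Zero-pad, store the message length in bits big-endian, compress. */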
	memset(&buf[len], 0, bit_offset - len);
	*bits = cpu_to_be64(sctx->count << 3);
	sha256_choose_blocks(sctx->state, buf, 1, force_generic, force_simd);

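	/* Emit the digest as big-endian 32-bit words. */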
	for (i = 0; i < digest_size; i += 4)
		put_unaligned_be32(sctx->state[i / 4], out + i);
}
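
/*
 * Illustrative caller sketch (not part of this header): a wrapper holding
 * a buffered partial block might finalize roughly as follows, assuming
 * sctx->count already reflects the total message length in bytes:
 *
 *	sha256_finup(sctx, partial, partial_len, out, SHA256_DIGEST_SIZE,
 *		     false, false);
 */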

#endif /* _CRYPTO_INTERNAL_SHA2_H */