/linux/include/crypto/

sha256_base.h
    42  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in lib_sha256_base_do_update()
    46  if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {  in lib_sha256_base_do_update()
    50  int p = SHA256_BLOCK_SIZE - partial;  in lib_sha256_base_do_update()
    59  blocks = len / SHA256_BLOCK_SIZE;  in lib_sha256_base_do_update()
    60  len %= SHA256_BLOCK_SIZE;  in lib_sha256_base_do_update()
    64  data += blocks * SHA256_BLOCK_SIZE;  in lib_sha256_base_do_update()
    87  const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);  in lib_sha256_base_do_finalize()
    89  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in lib_sha256_base_do_finalize()
    93  memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);  in lib_sha256_base_do_finalize()
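
The lib_sha256_base_do_update()/lib_sha256_base_do_finalize() hits above are the generic partial-block pattern: input is staged in a SHA256_BLOCK_SIZE buffer until a whole block is available, and finalization appends 0x80, zero-pads up to the last 8 bytes, and stores the message length in bits there. A minimal standalone C sketch of that pattern follows; the hash_state struct, the do_update()/do_finalize() names, and the block_fn callback are illustrative stand-ins, not the kernel API.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 64   /* mirrors SHA256_BLOCK_SIZE */

/* Illustrative state: total bytes hashed plus a one-block staging buffer. */
struct hash_state {
    uint64_t count;
    uint8_t  buf[BLOCK_SIZE];
};

/* Stand-in for the arch-specific transform: consumes nblocks 64-byte blocks. */
typedef void (*block_fn_t)(struct hash_state *st, const uint8_t *blocks,
                           size_t nblocks);

static void do_update(struct hash_state *st, const uint8_t *data, size_t len,
                      block_fn_t block_fn)
{
    size_t partial = st->count % BLOCK_SIZE;

    st->count += len;   /* book-keeping first, before len is consumed */

    if (partial + len >= BLOCK_SIZE) {
        /* Top up and flush the staging buffer if it holds a partial block. */
        if (partial) {
            size_t p = BLOCK_SIZE - partial;

            memcpy(st->buf + partial, data, p);
            data += p;
            len -= p;
            block_fn(st, st->buf, 1);
            partial = 0;
        }

        /* Hash all remaining full blocks straight from the input. */
        size_t blocks = len / BLOCK_SIZE;

        if (blocks) {
            block_fn(st, data, blocks);
            data += blocks * BLOCK_SIZE;
            len %= BLOCK_SIZE;
        }
    }

    /* Less than one block left: stash it for the next call. */
    if (len)
        memcpy(st->buf + partial, data, len);
}

static void do_finalize(struct hash_state *st, block_fn_t block_fn)
{
    const size_t bit_offset = BLOCK_SIZE - sizeof(uint64_t);
    size_t partial = st->count % BLOCK_SIZE;
    uint64_t bits = st->count << 3;   /* message length in bits */

    /* Append the 0x80 terminator; spill into an extra block if needed. */
    st->buf[partial++] = 0x80;
    if (partial > bit_offset) {
        memset(st->buf + partial, 0x0, BLOCK_SIZE - partial);
        block_fn(st, st->buf, 1);
        partial = 0;
    }
    memset(st->buf + partial, 0x0, bit_offset - partial);

    /* Store the bit count big-endian in the last 8 bytes of the block. */
    for (int i = 0; i < 8; i++)
        st->buf[bit_offset + i] = (uint8_t)(bits >> (8 * (7 - i)));
    block_fn(st, st->buf, 1);
}

Only the buffering arithmetic is reproduced here; the kernel header works on its own sha256_state and block-function types.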

sha2.h
    15  #define SHA256_BLOCK_SIZE 64   (macro)
    70  u8 buf[SHA256_BLOCK_SIZE];

/linux/net/mptcp/

crypto.c
    46  u8 input[SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE];  in mptcp_crypto_hmac_sha()
    58  memset(input, 0x36, SHA256_BLOCK_SIZE);  in mptcp_crypto_hmac_sha()
    64  memcpy(&input[SHA256_BLOCK_SIZE], msg, len);  in mptcp_crypto_hmac_sha()
    69  sha256(input, SHA256_BLOCK_SIZE + len, &input[SHA256_BLOCK_SIZE]);  in mptcp_crypto_hmac_sha()
    72  memset(input, 0x5C, SHA256_BLOCK_SIZE);  in mptcp_crypto_hmac_sha()
    78  sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, hmac);  in mptcp_crypto_hmac_sha()
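
The mptcp_crypto_hmac_sha() hits spell out the ipad/opad HMAC-SHA256 construction: one buffer holds (key XOR 0x36..36) || message for the inner hash, whose digest is written in place at offset SHA256_BLOCK_SIZE and then re-hashed under (key XOR 0x5c..5c). A userspace sketch of the same layout, assuming OpenSSL's one-shot SHA256() in place of the kernel's sha256() helper; the hmac_sha256() name, the key-at-most-one-block assumption, and the fixed message-size bound are illustrative only.

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <openssl/sha.h>              /* SHA256(), SHA256_DIGEST_LENGTH */

#define BLOCK  64                     /* SHA-256 block size */
#define DIGEST SHA256_DIGEST_LENGTH   /* 32-byte digest */

/* HMAC-SHA256 over one buffer, laid out as in the mptcp code: pad || message
 * for the inner hash, then pad || inner-digest for the outer hash. The key is
 * assumed to be at most one block long (longer keys would need pre-hashing),
 * and msglen is capped only to keep the sketch's buffer fixed-size. */
static void hmac_sha256(const uint8_t *key, size_t keylen,
                        const uint8_t *msg, size_t msglen,
                        uint8_t hmac[DIGEST])
{
    uint8_t input[BLOCK + 256];
    size_t i;

    if (keylen > BLOCK || msglen > sizeof(input) - BLOCK)
        return;   /* outside the bounds this sketch handles */

    /* Inner hash: H((key ^ ipad) || msg), ipad = 0x36 repeated. */
    memset(input, 0x36, BLOCK);
    for (i = 0; i < keylen; i++)
        input[i] ^= key[i];
    memcpy(&input[BLOCK], msg, msglen);
    /* The inner digest overwrites the message area, as in the mptcp layout. */
    SHA256(input, BLOCK + msglen, &input[BLOCK]);

    /* Outer hash: H((key ^ opad) || inner_digest), opad = 0x5c repeated. */
    memset(input, 0x5C, BLOCK);
    for (i = 0; i < keylen; i++)
        input[i] ^= key[i];
    SHA256(input, BLOCK + DIGEST, hmac);
}

Writing the inner digest straight back into the buffer at offset BLOCK is what lets a single input[] serve both passes.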

/linux/arch/sparc/crypto/

sha256_glue.c
    37  done = SHA256_BLOCK_SIZE - partial;  in __sha256_sparc64_update()
    41  if (len - done >= SHA256_BLOCK_SIZE) {  in __sha256_sparc64_update()
    42  const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;  in __sha256_sparc64_update()
    45  done += rounds * SHA256_BLOCK_SIZE;  in __sha256_sparc64_update()
    55  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_sparc64_update()
    58  if (partial + len < SHA256_BLOCK_SIZE) {  in sha256_sparc64_update()
    73  static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };  in sha256_sparc64_final()
    78  index = sctx->count % SHA256_BLOCK_SIZE;  in sha256_sparc64_final()
    79  padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56) - index);  in sha256_sparc64_final()
    141  .cra_blocksize = SHA256_BLOCK_SIZE,
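
The sha256_sparc64_final() hits at lines 73-79 are the classic MD-style padding: append 0x80 and zeros so that, once the 8-byte bit count is added, the data ends exactly on a 64-byte boundary. A small sketch of just that arithmetic; pad_len() is a hypothetical helper name used here for illustration.

#include <stddef.h>
#include <stdint.h>

#define BLOCK_SIZE 64

/* One 0x80 byte followed by zeros, fed to update() as the padding source
 * (mirrors the static padding[] seen at line 73). */
static const uint8_t padding[BLOCK_SIZE] = { 0x80, };

/* How many bytes of padding[] to append, given `count` bytes hashed so far,
 * so that the 8-byte bit count that follows lands exactly at the end of a
 * 64-byte block. Examples: index 0 -> 56, 55 -> 1, 56 -> 64, 63 -> 57. */
static size_t pad_len(uint64_t count)
{
    size_t index = count % BLOCK_SIZE;

    return (index < 56) ? (56 - index) : ((BLOCK_SIZE + 56) - index);
}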

/linux/arch/mips/cavium-octeon/crypto/

octeon-sha256.c
    74  partial = sctx->count % SHA256_BLOCK_SIZE;  in __octeon_sha256_update()
    79  if ((partial + len) >= SHA256_BLOCK_SIZE) {  in __octeon_sha256_update()
    83  done + SHA256_BLOCK_SIZE);  in __octeon_sha256_update()
    89  done += SHA256_BLOCK_SIZE;  in __octeon_sha256_update()
    91  } while (done + SHA256_BLOCK_SIZE <= len);  in __octeon_sha256_update()
    110  if ((sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in octeon_sha256_update()
    205  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/drivers/crypto/nx/

nx-sha256.c
    22  u8 buf[SHA256_BLOCK_SIZE];
    73  u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE);  in nx_sha256_update()
    81  total = (sctx->count % SHA256_BLOCK_SIZE) + len;  in nx_sha256_update()
    82  if (total < SHA256_BLOCK_SIZE) {  in nx_sha256_update()
    134  to_process = to_process & ~(SHA256_BLOCK_SIZE - 1);  in nx_sha256_update()
    168  } while (leftover >= SHA256_BLOCK_SIZE);  in nx_sha256_update()
    201  if (sctx->count >= SHA256_BLOCK_SIZE) {  in nx_sha256_final()
    214  len = sctx->count & (SHA256_BLOCK_SIZE - 1);  in nx_sha256_final()
    218  if (len != (sctx->count & (SHA256_BLOCK_SIZE - 1))) {  in nx_sha256_final()
    282  .cra_blocksize = SHA256_BLOCK_SIZE,
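
Several nx-sha256.c hits lean on SHA256_BLOCK_SIZE being a power of two: count & (SHA256_BLOCK_SIZE - 1) replaces a modulo (lines 214, 218), and x & ~(SHA256_BLOCK_SIZE - 1) rounds down to a whole number of blocks (line 134). A minimal sketch of the two idioms; the helper names are illustrative.

#include <stdint.h>

#define BLOCK_SIZE 64u   /* must stay a power of two for the masks to hold */

/* Bytes beyond the last full block; same value as count % BLOCK_SIZE. */
static inline uint64_t partial_bytes(uint64_t count)
{
    return count & (BLOCK_SIZE - 1);
}

/* Largest multiple of BLOCK_SIZE not exceeding len; same value as
 * (len / BLOCK_SIZE) * BLOCK_SIZE. */
static inline uint64_t round_down_to_blocks(uint64_t len)
{
    return len & ~(uint64_t)(BLOCK_SIZE - 1);
}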

/linux/arch/arm64/crypto/

sha256-glue.c
    76  .base.cra_blocksize = SHA256_BLOCK_SIZE,
    110  chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)  in sha256_update_neon()
    111  chunk = SHA256_BLOCK_SIZE -  in sha256_update_neon()
    112  sctx->count % SHA256_BLOCK_SIZE;  in sha256_update_neon()
    156  .base.cra_blocksize = SHA256_BLOCK_SIZE,
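
The sha256_update_neon() hits at lines 110-112 cap each chunk handed to the SIMD code so it ends exactly at a block boundary, letting the driver yield between blocks instead of holding the NEON unit for the whole input. A schematic userspace sketch of that chunking loop; hash_state, process_chunk(), and update_in_block_sized_chunks() are illustrative stand-ins, with process_chunk() reduced to a placeholder that only advances the byte count.

#include <stddef.h>
#include <stdint.h>

#define BLOCK_SIZE 64

struct hash_state {
    uint64_t count;   /* total bytes hashed so far */
};

/* Placeholder for the real buffering/hashing step; here it only advances the
 * byte count so the chunking logic below can be exercised. */
static void process_chunk(struct hash_state *st, const uint8_t *data,
                          size_t chunk)
{
    (void)data;
    st->count += chunk;
}

static void update_in_block_sized_chunks(struct hash_state *st,
                                         const uint8_t *data, size_t len)
{
    while (len) {
        size_t chunk = len;
        size_t partial = st->count % BLOCK_SIZE;

        /* Never let a single pass run past the end of the current block, so
         * the caller gets a chance to yield between blocks. */
        if (chunk + partial > BLOCK_SIZE)
            chunk = BLOCK_SIZE - partial;

        process_chunk(st, data, chunk);
        data += chunk;
        len -= chunk;
    }
}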

sha2-ce-glue.c
    47  src += (blocks - rem) * SHA256_BLOCK_SIZE;  in sha256_ce_transform()
    84  bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;  in sha256_ce_finup()
    158  .cra_blocksize = SHA256_BLOCK_SIZE,
    176  .cra_blocksize = SHA256_BLOCK_SIZE,
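
The sha256_ce_finup() hit at line 84 captures when the final padding can be fused into a single hardware pass: nothing has been hashed yet, the input is a whole number of blocks, and it is not empty. A one-function sketch of that predicate; the names are illustrative.

#include <stdbool.h>
#include <stdint.h>

#define BLOCK_SIZE 64

/* True when finup() can hand the whole job, padding included, to one
 * hardware pass: no data hashed yet, a whole number of blocks, not empty. */
static bool can_finalize_in_one_pass(uint64_t bytes_already_hashed,
                                     uint64_t len)
{
    return bytes_already_hashed == 0 && len % BLOCK_SIZE == 0 && len != 0;
}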

/linux/arch/arm/crypto/

sha2-ce-glue.c
    36  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in sha2_ce_update()
    79  .cra_blocksize = SHA256_BLOCK_SIZE,
    93  .cra_blocksize = SHA256_BLOCK_SIZE,

sha256_neon_glue.c
    33  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in crypto_sha256_neon_update()
    75  .cra_blocksize = SHA256_BLOCK_SIZE,

sha256_glue.c
    65  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/arch/x86/crypto/

sha256_ssse3_glue.c
    64  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in _sha256_update()
    132  .cra_blocksize = SHA256_BLOCK_SIZE,
    205  .cra_blocksize = SHA256_BLOCK_SIZE,
    289  .cra_blocksize = SHA256_BLOCK_SIZE,
    372  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/drivers/crypto/

padlock-sha.c
    155  leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;  in padlock_sha256_finup()
    156  space = SHA256_BLOCK_SIZE - leftover;  in padlock_sha256_finup()
    261  .cra_blocksize = SHA256_BLOCK_SIZE,
    381  if ((partial + len) >= SHA256_BLOCK_SIZE) {  in padlock_sha256_update_nano()
    387  done + SHA256_BLOCK_SIZE);  in padlock_sha256_update_nano()
    392  done += SHA256_BLOCK_SIZE;  in padlock_sha256_update_nano()
    397  if (len - done >= SHA256_BLOCK_SIZE) {  in padlock_sha256_update_nano()
    488  .cra_blocksize = SHA256_BLOCK_SIZE,
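
The padlock_sha256_finup() hit at line 155 uses a twist on the power-of-two modulo: ((count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1 yields a value in 1..64 rather than 0..63, so a block-aligned count keeps one full block as leftover for the final operation instead of an empty tail. A minimal sketch, assuming count is nonzero; the helper names are illustrative.

#include <stdint.h>

#define BLOCK_SIZE 64

/* Remaining bytes to hand to the final operation, in the range 1..64 rather
 * than 0..63, so a block-aligned count keeps one full block as leftover.
 * Examples: count 64 -> 64, 65 -> 1, 100 -> 36 (count assumed nonzero). */
static uint64_t final_leftover(uint64_t count)
{
    return ((count - 1) & (BLOCK_SIZE - 1)) + 1;
}

/* Free space left in the final block buffer. */
static uint64_t final_space(uint64_t count)
{
    return BLOCK_SIZE - final_leftover(count);
}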

/linux/drivers/crypto/qce/

sha.h
    16  #define QCE_SHA_MAX_BLOCKSIZE SHA256_BLOCK_SIZE

/linux/crypto/

sha256_generic.c
    72  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/arch/riscv/crypto/

sha256-riscv64-glue.c
    88  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/arch/s390/crypto/

sha256_s390.c
    74  .cra_blocksize = SHA256_BLOCK_SIZE,

hmac_s390.c
    87  case SHA256_BLOCK_SIZE:  in kmac_sha2_set_imbl()

/linux/drivers/crypto/allwinner/sun8i-ss/

sun8i-ss.h
    236  u8 key[SHA256_BLOCK_SIZE];

/linux/fs/verity/

hash_algs.c
    17  .block_size = SHA256_BLOCK_SIZE,

/linux/arch/powerpc/crypto/

sha256-spe-glue.c
    195  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/lib/crypto/

sha256.c
    128  input += SHA256_BLOCK_SIZE;  in sha256_transform_blocks()

/linux/drivers/crypto/intel/keembay/

keembay-ocs-hcu-core.c
    585  rctx->blk_sz = SHA256_BLOCK_SIZE;  in kmb_ocs_hcu_init()
    952  .cra_blocksize = SHA256_BLOCK_SIZE,
    978  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/drivers/crypto/aspeed/

aspeed-hace-hash.c
    699  rctx->block_size = SHA256_BLOCK_SIZE;  in aspeed_sham_init()
    903  .cra_blocksize = SHA256_BLOCK_SIZE,
    1039  .cra_blocksize = SHA256_BLOCK_SIZE,

/linux/drivers/crypto/starfive/

jh7110-hash.c
    632  .cra_blocksize = SHA256_BLOCK_SIZE,
    661  .cra_blocksize = SHA256_BLOCK_SIZE,