Home
last modified time | relevance | path

Searched refs: CHACHA_BLOCK_SIZE (results 1–17 of 17), sorted by relevance

/linux/arch/x86/crypto/
chacha_glue.c:44 len = min(len, maxblocks * CHACHA_BLOCK_SIZE); in chacha_advance()
45 return round_up(len, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE; in chacha_advance()
53 while (bytes >= CHACHA_BLOCK_SIZE * 8) { in chacha_dosimd()
56 bytes -= CHACHA_BLOCK_SIZE * 8; in chacha_dosimd()
57 src += CHACHA_BLOCK_SIZE * 8; in chacha_dosimd()
58 dst += CHACHA_BLOCK_SIZE * 8; in chacha_dosimd()
61 if (bytes > CHACHA_BLOCK_SIZE * 4) { in chacha_dosimd()
67 if (bytes > CHACHA_BLOCK_SIZE * 2) { in chacha_dosimd()
82 while (bytes >= CHACHA_BLOCK_SIZE * 8) { in chacha_dosimd()
84 bytes -= CHACHA_BLOCK_SIZE * 8; in chacha_dosimd()
[all …]
/linux/arch/arm/crypto/
chacha-glue.c:43 u8 buf[CHACHA_BLOCK_SIZE]; in chacha_doneon()
45 while (bytes > CHACHA_BLOCK_SIZE) { in chacha_doneon()
46 unsigned int l = min(bytes, CHACHA_BLOCK_SIZE * 4U); in chacha_doneon()
52 state[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE); in chacha_doneon()
58 if (bytes != CHACHA_BLOCK_SIZE) in chacha_doneon()
89 bytes <= CHACHA_BLOCK_SIZE) { in chacha_crypt_arch()
91 state[12] += DIV_ROUND_UP(bytes, CHACHA_BLOCK_SIZE); in chacha_crypt_arch()
130 state[12] += DIV_ROUND_UP(nbytes, CHACHA_BLOCK_SIZE); in chacha_stream_xor()
207 .chunksize = CHACHA_BLOCK_SIZE,
222 .chunksize = CHACHA_BLOCK_SIZE,
[all …]
/linux/lib/crypto/
libchacha.c:19 u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long)); in chacha_crypt_generic()
21 while (bytes >= CHACHA_BLOCK_SIZE) { in chacha_crypt_generic()
23 crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE); in chacha_crypt_generic()
24 bytes -= CHACHA_BLOCK_SIZE; in chacha_crypt_generic()
25 dst += CHACHA_BLOCK_SIZE; in chacha_crypt_generic()
26 src += CHACHA_BLOCK_SIZE; in chacha_crypt_generic()
chacha20poly1305.c:229 u8 chacha_stream[CHACHA_BLOCK_SIZE]; in chacha20poly1305_crypt_sg_inplace()
266 size_t l = min(length, CHACHA_BLOCK_SIZE - partial); in chacha20poly1305_crypt_sg_inplace()
269 partial = (partial + l) & (CHACHA_BLOCK_SIZE - 1); in chacha20poly1305_crypt_sg_inplace()
275 if (likely(length >= CHACHA_BLOCK_SIZE || length == sl)) { in chacha20poly1305_crypt_sg_inplace()
279 l &= ~(CHACHA_BLOCK_SIZE - 1); in chacha20poly1305_crypt_sg_inplace()
287 CHACHA_BLOCK_SIZE); in chacha20poly1305_crypt_sg_inplace()
/linux/arch/riscv/crypto/
chacha-riscv64-glue.c:22 u8 block_buffer[CHACHA_BLOCK_SIZE]; in riscv64_chacha20_crypt()
37 nbytes = walk.nbytes & ~(CHACHA_BLOCK_SIZE - 1); in riscv64_chacha20_crypt()
38 tail_bytes = walk.nbytes & (CHACHA_BLOCK_SIZE - 1); in riscv64_chacha20_crypt()
43 iv[0] += nbytes / CHACHA_BLOCK_SIZE; in riscv64_chacha20_crypt()
49 CHACHA_BLOCK_SIZE, iv); in riscv64_chacha20_crypt()
69 .chunksize = CHACHA_BLOCK_SIZE,
70 .walksize = 4 * CHACHA_BLOCK_SIZE,
/linux/arch/arm64/crypto/
chacha-neon-glue.c:46 int l = min(bytes, CHACHA_BLOCK_SIZE * 5); in chacha_doneon()
48 if (l <= CHACHA_BLOCK_SIZE) { in chacha_doneon()
49 u8 buf[CHACHA_BLOCK_SIZE]; in chacha_doneon()
61 state[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE); in chacha_doneon()
86 if (!static_branch_likely(&have_neon) || bytes <= CHACHA_BLOCK_SIZE || in chacha_crypt_arch()
175 .chunksize = CHACHA_BLOCK_SIZE,
176 .walksize = 5 * CHACHA_BLOCK_SIZE,
191 .chunksize = CHACHA_BLOCK_SIZE,
192 .walksize = 5 * CHACHA_BLOCK_SIZE,
207 .chunksize = CHACHA_BLOCK_SIZE,
[all …]
/linux/arch/s390/crypto/
chacha-glue.c:31 *counter += round_up(nbytes, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE; in chacha20_crypt_s390()
51 if (nbytes <= CHACHA_BLOCK_SIZE) { in chacha20_s390()
85 if (bytes <= CHACHA_BLOCK_SIZE || nrounds != 20 || !cpu_has_vx()) in chacha_crypt_arch()
105 .chunksize = CHACHA_BLOCK_SIZE,
/linux/include/vdso/
getrandom.h:12 #define CHACHA_BLOCK_SIZE 64 (macro)
36 u8 batch[CHACHA_BLOCK_SIZE * 3 / 2];
39 u8 batch_key[CHACHA_BLOCK_SIZE * 2];
/linux/lib/vdso/
getrandom.c:235 nblocks = len / CHACHA_BLOCK_SIZE; in __cvdso_getrandom_data()
238 buffer += nblocks * CHACHA_BLOCK_SIZE; in __cvdso_getrandom_data()
239 len -= nblocks * CHACHA_BLOCK_SIZE; in __cvdso_getrandom_data()
242 BUILD_BUG_ON(sizeof(state->batch_key) % CHACHA_BLOCK_SIZE != 0); in __cvdso_getrandom_data()
246 sizeof(state->batch_key) / CHACHA_BLOCK_SIZE); in __cvdso_getrandom_data()
/linux/crypto/
chacha_generic.c:30 nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE); in chacha_stream_xor()
81 .chunksize = CHACHA_BLOCK_SIZE,
96 .chunksize = CHACHA_BLOCK_SIZE,
111 .chunksize = CHACHA_BLOCK_SIZE,
adiantum.c:404 if (round_up(stream_len, CHACHA_BLOCK_SIZE) <= req->cryptlen) in adiantum_crypt()
405 stream_len = round_up(stream_len, CHACHA_BLOCK_SIZE); in adiantum_crypt()
/linux/arch/powerpc/crypto/
chacha-p10-glue.c:47 state[12] += l / CHACHA_BLOCK_SIZE; in chacha_p10_do_8x()
69 if (!static_branch_likely(&have_p10) || bytes <= CHACHA_BLOCK_SIZE || in chacha_crypt_arch()
161 .chunksize = CHACHA_BLOCK_SIZE,
176 .chunksize = CHACHA_BLOCK_SIZE,
191 .chunksize = CHACHA_BLOCK_SIZE,
/linux/arch/mips/crypto/
chacha-glue.c:92 .chunksize = CHACHA_BLOCK_SIZE,
107 .chunksize = CHACHA_BLOCK_SIZE,
122 .chunksize = CHACHA_BLOCK_SIZE,
/linux/include/crypto/
chacha.h:25 #define CHACHA_BLOCK_SIZE 64 (macro)
28 #define CHACHA_STATE_WORDS (CHACHA_BLOCK_SIZE / sizeof(u32))
/linux/fs/bcachefs/
checksum.h:176 EBUG_ON(offset & (CHACHA_BLOCK_SIZE - 1)); in nonce_add()
178 le32_add_cpu(&nonce.d[0], offset / CHACHA_BLOCK_SIZE); in nonce_add()
btree_io.h:115 nonce = nonce_add(nonce, round_up(bytes, CHACHA_BLOCK_SIZE)); in bset_encrypt()
/linux/drivers/char/
random.c:319 u8 first_block[CHACHA_BLOCK_SIZE]; in crng_fast_key_erasure()
399 u8 tmp[CHACHA_BLOCK_SIZE]; in _get_random_bytes()
411 if (len < CHACHA_BLOCK_SIZE) { in _get_random_bytes()
421 len -= CHACHA_BLOCK_SIZE; in _get_random_bytes()
422 buf += CHACHA_BLOCK_SIZE; in _get_random_bytes()
445 u8 block[CHACHA_BLOCK_SIZE]; in get_random_bytes_user()
505 * formula of (integer_blocks + 0.5) * CHACHA_BLOCK_SIZE. \
507 type entropy[CHACHA_BLOCK_SIZE * 3 / (2 * sizeof(type))]; \