/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * ChaCha stream cipher (RISC-V optimized)
 *
 * Copyright (C) 2023 SiFive, Inc.
 * Author: Jerry Shih <jerry.shih@sifive.com>
 */
8
9 #include <asm/simd.h>
10 #include <asm/vector.h>
11 #include <crypto/internal/simd.h>
12 #include <linux/linkage.h>
13
14 static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_zvkb);
15
16 asmlinkage void chacha_zvkb(struct chacha_state *state, const u8 *in, u8 *out,
17 size_t nblocks, int nrounds);
18
19 #define hchacha_block_arch hchacha_block_generic /* not implemented yet */
20
/*
 * Encrypt/decrypt @bytes bytes from @src into @dst with the vector (Zvkb)
 * ChaCha code when it is usable, falling back to the generic C
 * implementation otherwise.
 *
 * chacha_zvkb() only handles whole 64-byte blocks, so a trailing partial
 * block is bounced through an on-stack buffer.
 */
static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
			      const u8 *src, unsigned int bytes, int nrounds)
{
	unsigned int nblocks = bytes / CHACHA_BLOCK_SIZE;
	unsigned int partial = bytes % CHACHA_BLOCK_SIZE;
	u8 buf[CHACHA_BLOCK_SIZE];

	/* No Zvkb, or not in a context where vector registers may be used. */
	if (!static_branch_likely(&use_zvkb) || !crypto_simd_usable())
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	kernel_vector_begin();
	if (nblocks) {
		unsigned int len = nblocks * CHACHA_BLOCK_SIZE;

		chacha_zvkb(state, src, dst, nblocks, nrounds);
		src += len;
		dst += len;
	}
	if (partial) {
		/* Pad the tail out to a full block via the bounce buffer. */
		memcpy(buf, src, partial);
		chacha_zvkb(state, buf, buf, 1, nrounds);
		memcpy(dst, buf, partial);
	}
	kernel_vector_end();
}
44
45 #define chacha_mod_init_arch chacha_mod_init_arch
chacha_mod_init_arch(void)46 static void chacha_mod_init_arch(void)
47 {
48 if (riscv_isa_extension_available(NULL, ZVKB) &&
49 riscv_vector_vlen() >= 128)
50 static_branch_enable(&use_zvkb);
51 }
52