/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * ChaCha stream cipher (P10 accelerated)
 *
 * Copyright 2023- IBM Corp. All rights reserved.
 */

#include <crypto/internal/simd.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <linux/sizes.h>
#include <asm/simd.h>
#include <asm/switch_to.h>

/*
 * Power10 little-endian VSX assembly core.  Only ever handed lengths that
 * are a multiple of 256 bytes by chacha_p10_do_8x() below.
 */
asmlinkage void chacha_p10le_8x(const struct chacha_state *state, u8 *dst,
				const u8 *src, unsigned int len, int nrounds);

/* Patched true at init when the CPU implements ISA 3.1 (Power10). */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_p10);

/*
 * Enter a region where kernel-mode VSX may be used.  Preemption is
 * disabled first so the enabled vector state cannot be lost to a
 * context switch while the assembly runs.
 */
static void vsx_begin(void)
{
	preempt_disable();
	enable_kernel_vsx();
}

/* Leave the kernel-mode VSX region; exact mirror of vsx_begin(). */
static void vsx_end(void)
{
	disable_kernel_vsx();
	preempt_enable();
}

/*
 * Crypt @bytes bytes: full 256-byte chunks go through the P10 assembly,
 * any remainder falls back to the generic C implementation.  Must be
 * called between vsx_begin() and vsx_end().
 */
static void chacha_p10_do_8x(struct chacha_state *state, u8 *dst, const u8 *src,
			     unsigned int bytes, int nrounds)
{
	/* Round down to a multiple of 256, the assembly's granularity. */
	unsigned int l = bytes & ~0x0FF;

	if (l > 0) {
		chacha_p10le_8x(state, dst, src, l, nrounds);
		bytes -= l;
		src += l;
		dst += l;
		/*
		 * The assembly does not write the counter back; advance
		 * state word 12 (the ChaCha block counter) by the number
		 * of blocks it consumed.
		 */
		state->x[12] += l / CHACHA_BLOCK_SIZE;
	}

	/* Tail of fewer than 256 bytes: portable code path. */
	if (bytes > 0)
		chacha_crypt_generic(state, dst, src, bytes, nrounds);
}

#define hchacha_block_arch hchacha_block_generic /* not implemented yet */

/*
 * Arch entry point.  Use the accelerated path only when it is available
 * and worthwhile: P10 present, more than one block of data, and SIMD
 * usable in the current context; otherwise defer to the generic code.
 * Work is capped at 4 KiB per vsx_begin()/vsx_end() pair so preemption
 * is never disabled for an unbounded stretch.
 */
static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
			      const u8 *src, unsigned int bytes, int nrounds)
{
	if (!static_branch_likely(&have_p10) || bytes <= CHACHA_BLOCK_SIZE ||
	    !crypto_simd_usable())
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	do {
		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);

		vsx_begin();
		chacha_p10_do_8x(state, dst, src, todo, nrounds);
		vsx_end();

		bytes -= todo;
		src += todo;
		dst += todo;
	} while (bytes);
}

#define chacha_mod_init_arch chacha_mod_init_arch
/* Flip the static key at module init on ISA 3.1 (Power10) hardware. */
static void chacha_mod_init_arch(void)
{
	if (cpu_has_feature(CPU_FTR_ARCH_31))
		static_branch_enable(&have_p10);
}