/*
 * ChaCha and HChaCha functions (ARM64 optimized)
 *
 * Copyright (C) 2016 - 2017 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Based on:
 * ChaCha20 256-bit cipher algorithm, RFC7539, SIMD glue code
 *
 * Copyright (C) 2015 Martin Willi
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <crypto/internal/simd.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>

#include <asm/hwcap.h>
#include <asm/simd.h>

asmlinkage void chacha_block_xor_neon(const struct chacha_state *state,
				      u8 *dst, const u8 *src, int nrounds);
asmlinkage void chacha_4block_xor_neon(const struct chacha_state *state,
				       u8 *dst, const u8 *src,
				       int nrounds, int bytes);
asmlinkage void hchacha_block_neon(const struct chacha_state *state,
				   u32 out[HCHACHA_OUT_WORDS], int nrounds);

/* Enabled at init time when the CPU supports Advanced SIMD (NEON). */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);

static void chacha_doneon(struct chacha_state *state, u8 *dst, const u8 *src,
			  int bytes, int nrounds)
{
	while (bytes > 0) {
		/* The multi-block NEON kernel handles up to five blocks per call. */
		int l = min(bytes, CHACHA_BLOCK_SIZE * 5);

		if (l <= CHACHA_BLOCK_SIZE) {
			/*
			 * Final (possibly partial) block: bounce through a
			 * full-size buffer so the NEON code can read and
			 * write a whole block.
			 */
			u8 buf[CHACHA_BLOCK_SIZE];

			memcpy(buf, src, l);
			chacha_block_xor_neon(state, buf, buf, nrounds);
			memcpy(dst, buf, l);
			state->x[12] += 1;
			break;
		}
		chacha_4block_xor_neon(state, dst, src, nrounds, l);
		bytes -= l;
		src += l;
		dst += l;
		/* x[12] is the block counter; advance it past the blocks just handled. */
		state->x[12] += DIV_ROUND_UP(l, CHACHA_BLOCK_SIZE);
	}
}

static void hchacha_block_arch(const struct chacha_state *state,
			       u32 out[HCHACHA_OUT_WORDS], int nrounds)
{
	if (!static_branch_likely(&have_neon) || !crypto_simd_usable()) {
		hchacha_block_generic(state, out, nrounds);
	} else {
		scoped_ksimd()
			hchacha_block_neon(state, out, nrounds);
	}
}

static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
			      const u8 *src, unsigned int bytes, int nrounds)
{
	if (!static_branch_likely(&have_neon) || bytes <= CHACHA_BLOCK_SIZE ||
	    !crypto_simd_usable())
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	do {
		/* Limit each kernel-mode SIMD section to 4 KiB of data. */
		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);

		scoped_ksimd()
			chacha_doneon(state, dst, src, todo, nrounds);

		bytes -= todo;
		src += todo;
		dst += todo;
	} while (bytes);
}

#define chacha_mod_init_arch chacha_mod_init_arch
static void chacha_mod_init_arch(void)
{
	if (cpu_have_named_feature(ASIMD))
		static_branch_enable(&have_neon);
}
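
/*
 * Illustrative usage sketch (not part of this file): how a caller would
 * typically reach the arch hooks above through the generic ChaCha library
 * interface. The chacha_init()/chacha_crypt() helpers and the
 * CHACHA_KEY_SIZE/CHACHA_IV_SIZE constants are assumed to come from
 * <crypto/chacha.h>; check the header in your tree for the exact signatures.
 * Passing nrounds = 20 selects ChaCha20.
 *
 *	#include <crypto/chacha.h>
 *
 *	static void example_chacha20(u8 *dst, const u8 *src, unsigned int len,
 *				     const u32 key[CHACHA_KEY_SIZE / sizeof(u32)],
 *				     const u8 iv[CHACHA_IV_SIZE])
 *	{
 *		struct chacha_state state;
 *
 *		chacha_init(&state, key, iv);
 *		chacha_crypt(&state, dst, src, len, 20);
 *	}
 */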