// SPDX-License-Identifier: GPL-2.0-only
#include <asm/simd.h>
#include <asm/switch_to.h>
#include <linux/cpufeature.h>
#include <linux/jump_label.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>

/* The vector routine operates on 16-byte-aligned, 16-byte-multiple chunks. */
#define VMX_ALIGN 16
#define VMX_ALIGN_MASK (VMX_ALIGN-1)

/*
 * Minimum buffer length before the vector path pays for the cost of
 * entering/leaving AltiVec context; shorter buffers stay on the scalar
 * fallback.  (Presumably tuned empirically — see the length check in
 * crc32c_arch() below.)
 */
#define VECTOR_BREAKPOINT 512

/* Set once at init when the CPU advertises the vector crypto facility. */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_vec_crypto);

/* Only CRC32C is accelerated here; plain CRC32 LE/BE use the generic code. */
#define crc32_le_arch crc32_le_base /* not implemented on this arch */
#define crc32_be_arch crc32_be_base /* not implemented on this arch */

/*
 * VPMSUM assembly routine.  Requires @p 16-byte aligned and @len a
 * multiple of 16; callers must arrange AltiVec context themselves.
 */
u32 __crc32c_vpmsum(u32 crc, const u8 *p, size_t len);

/*
 * crc32c_arch() - compute CRC32C, using the vector unit when profitable.
 * @crc: seed / CRC of preceding data
 * @p:   input buffer
 * @len: number of bytes at @p
 *
 * Falls back entirely to crc32c_base() when the buffer is too short, the
 * CPU lacks vector crypto, or SIMD may not be used in the current context.
 * Otherwise the unaligned head and the sub-16-byte tail are handled by the
 * scalar code, and the aligned middle by __crc32c_vpmsum().
 */
static inline u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
{
	unsigned int prealign;
	unsigned int tail;

	if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) ||
	    !static_branch_likely(&have_vec_crypto) ||
	    unlikely(!may_use_simd()))
		return crc32c_base(crc, p, len);

	/*
	 * Consume bytes up to the next 16-byte boundary with the scalar
	 * code.  The length check above guarantees len >= prealign here.
	 */
	if ((unsigned long)p & VMX_ALIGN_MASK) {
		prealign = VMX_ALIGN - ((unsigned long)p & VMX_ALIGN_MASK);
		crc = crc32c_base(crc, p, prealign);
		len -= prealign;
		p += prealign;
	}

	/* Run the vector routine over the aligned multiple-of-16 middle. */
	if (len & ~VMX_ALIGN_MASK) {
		/*
		 * The AltiVec state set up by enable_kernel_altivec() must
		 * not be lost to preemption or a fault handler, so both are
		 * disabled around the vector region; note the tear-down is
		 * the exact mirror of the setup order.
		 */
		preempt_disable();
		pagefault_disable();
		enable_kernel_altivec();
		crc = __crc32c_vpmsum(crc, p, len & ~VMX_ALIGN_MASK);
		disable_kernel_altivec();
		pagefault_enable();
		preempt_enable();
	}

	/* Any remaining (< 16 byte) tail is finished with the scalar code. */
	tail = len & VMX_ALIGN_MASK;
	if (tail) {
		p += len & ~VMX_ALIGN_MASK;
		crc = crc32c_base(crc, p, tail);
	}

	return crc;
}

#define crc32_mod_init_arch crc32_mod_init_arch
/*
 * Enable the vector path when the CPU implements ISA 2.07 (Power8-class)
 * and advertises the vector crypto facility in its user feature bits.
 */
static void crc32_mod_init_arch(void)
{
	if (cpu_has_feature(CPU_FTR_ARCH_207S) &&
	    (cur_cpu_spec->cpu_user_features2 & PPC_FEATURE2_VEC_CRYPTO))
		static_branch_enable(&have_vec_crypto);
}

/* Report which CRC variants this arch accelerates (CRC32C only, if at all). */
static inline u32 crc32_optimizations_arch(void)
{
	if (static_key_enabled(&have_vec_crypto))
		return CRC32C_OPTIMIZATION;
	return 0;
}