1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
4 *
5 * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
6 */
7
8 #include <linux/cpufeature.h>
9
10 #include <crypto/internal/simd.h>
11
12 #include <asm/hwcap.h>
13 #include <asm/neon.h>
14 #include <asm/simd.h>
15
16 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_crc32);
17 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_pmull);
18
19 #define PMULL_MIN_LEN 64 /* min size of buffer for pmull functions */
20
21 asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
22 asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);
23
24 asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
25 asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);
26
/*
 * Scalar CRC32 (little-endian polynomial): use the ARMv8 CRC
 * instructions when the CPU advertises them via HWCAP2, otherwise fall
 * back to the generic table-driven implementation.
 */
static inline u32 crc32_le_scalar(u32 crc, const u8 *p, size_t len)
{
	return static_branch_likely(&have_crc32) ?
	       crc32_armv8_le(crc, p, len) : crc32_le_base(crc, p, len);
}
33
/*
 * CRC32 (little-endian polynomial) arch entry point.  Buffers large
 * enough to amortize the NEON save/restore cost are handled by the
 * PMULL implementation when SIMD is usable in the current context;
 * head/tail fragments and small buffers go through the scalar helper.
 */
static inline u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
{
	size_t head, bulk;

	if (len < PMULL_MIN_LEN + 15 ||
	    !static_branch_likely(&have_pmull) || !crypto_simd_usable())
		return crc32_le_scalar(crc, p, len);

	/* Consume up to 15 leading bytes so p is 16-byte aligned. */
	head = -(uintptr_t)p & 15;
	if (head) {
		crc = crc32_le_scalar(crc, p, head);
		p += head;
		len -= head;
	}

	/* Whole 16-byte chunks under kernel-mode NEON. */
	bulk = round_down(len, 16);
	kernel_neon_begin();
	crc = crc32_pmull_le(p, bulk, crc);
	kernel_neon_end();

	/* Remaining tail bytes (< 16). */
	return crc32_le_scalar(crc, p + bulk, len - bulk);
}
55
/*
 * Scalar CRC32C: use the ARMv8 CRC instructions when the CPU
 * advertises them via HWCAP2, otherwise fall back to the generic
 * table-driven implementation.
 */
static inline u32 crc32c_scalar(u32 crc, const u8 *p, size_t len)
{
	return static_branch_likely(&have_crc32) ?
	       crc32c_armv8_le(crc, p, len) : crc32c_base(crc, p, len);
}
62
/*
 * CRC32C arch entry point.  Buffers large enough to amortize the NEON
 * save/restore cost are handled by the PMULL implementation when SIMD
 * is usable in the current context; head/tail fragments and small
 * buffers go through the scalar helper.
 */
static inline u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
{
	size_t head, bulk;

	if (len < PMULL_MIN_LEN + 15 ||
	    !static_branch_likely(&have_pmull) || !crypto_simd_usable())
		return crc32c_scalar(crc, p, len);

	/* Consume up to 15 leading bytes so p is 16-byte aligned. */
	head = -(uintptr_t)p & 15;
	if (head) {
		crc = crc32c_scalar(crc, p, head);
		p += head;
		len -= head;
	}

	/* Whole 16-byte chunks under kernel-mode NEON. */
	bulk = round_down(len, 16);
	kernel_neon_begin();
	crc = crc32c_pmull_le(p, bulk, crc);
	kernel_neon_end();

	/* Remaining tail bytes (< 16). */
	return crc32c_scalar(crc, p + bulk, len - bulk);
}
84
85 #define crc32_be_arch crc32_be_base /* not implemented on this arch */
86
87 #define crc32_mod_init_arch crc32_mod_init_arch
crc32_mod_init_arch(void)88 static inline void crc32_mod_init_arch(void)
89 {
90 if (elf_hwcap2 & HWCAP2_CRC32)
91 static_branch_enable(&have_crc32);
92 if (elf_hwcap2 & HWCAP2_PMULL)
93 static_branch_enable(&have_pmull);
94 }
95
crc32_optimizations_arch(void)96 static inline u32 crc32_optimizations_arch(void)
97 {
98 if (elf_hwcap2 & (HWCAP2_CRC32 | HWCAP2_PMULL))
99 return CRC32_LE_OPTIMIZATION | CRC32C_OPTIMIZATION;
100 return 0;
101 }
102