// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
 *
 * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
7
8 #include <linux/cpufeature.h>
9 #include <linux/crc32.h>
10 #include <linux/init.h>
11 #include <linux/kernel.h>
12 #include <linux/module.h>
13 #include <linux/string.h>
14
15 #include <crypto/internal/simd.h>
16
17 #include <asm/hwcap.h>
18 #include <asm/neon.h>
19 #include <asm/simd.h>
20
/*
 * Static keys toggled once at init time (see crc32_arm_init()) based on
 * the CPU hwcaps, so the hot paths pay only a patched-branch cost.
 */
static DEFINE_STATIC_KEY_FALSE(have_crc32);
static DEFINE_STATIC_KEY_FALSE(have_pmull);

#define PMULL_MIN_LEN 64 /* min size of buffer for pmull functions */

/*
 * Assembly entry points. Note the differing argument order: the pmull
 * routines take (buf, len, crc) while the ARMv8 CRC instruction
 * routines take (crc, buf, len).
 */
asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);

asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);
31
/*
 * Scalar CRC32 (little-endian polynomial): use the ARMv8 CRC
 * instructions when the CPU advertises them, otherwise fall back to
 * the generic table-driven implementation.
 */
static u32 crc32_le_scalar(u32 crc, const u8 *p, size_t len)
{
	if (!static_branch_likely(&have_crc32))
		return crc32_le_base(crc, p, len);

	return crc32_armv8_le(crc, p, len);
}
38
/*
 * CRC32 entry point: for large buffers, when the PMULL instructions
 * are present and NEON may be used in this context, process the bulk
 * of the data 16 bytes at a time with the polynomial-multiply kernel;
 * unaligned head and sub-16-byte tail go through the scalar helper.
 */
u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
{
	size_t head, body;

	/*
	 * PMULL_MIN_LEN + 15 guarantees at least PMULL_MIN_LEN bytes
	 * remain after peeling off up to 15 alignment bytes.
	 */
	if (len < PMULL_MIN_LEN + 15 ||
	    !static_branch_likely(&have_pmull) || !crypto_simd_usable())
		return crc32_le_scalar(crc, p, len);

	/* Peel off the unaligned head so p is 16-byte aligned. */
	head = -(uintptr_t)p & 15;
	if (head) {
		crc = crc32_le_scalar(crc, p, head);
		p += head;
		len -= head;
	}

	body = round_down(len, 16);
	kernel_neon_begin();
	crc = crc32_pmull_le(p, body, crc);
	kernel_neon_end();

	/* Finish the < 16 byte tail with the scalar routine. */
	return crc32_le_scalar(crc, p + body, len - body);
}
EXPORT_SYMBOL(crc32_le_arch);
61
/*
 * Scalar CRC32C (Castagnoli polynomial): use the ARMv8 CRC
 * instructions when available, otherwise the generic fallback.
 */
static u32 crc32c_le_scalar(u32 crc, const u8 *p, size_t len)
{
	if (!static_branch_likely(&have_crc32))
		return crc32c_le_base(crc, p, len);

	return crc32c_armv8_le(crc, p, len);
}
68
/*
 * CRC32C entry point: same structure as crc32_le_arch(), dispatching
 * the aligned bulk of large buffers to the PMULL kernel and the
 * head/tail remainders to the scalar helper.
 */
u32 crc32c_le_arch(u32 crc, const u8 *p, size_t len)
{
	size_t head, body;

	/*
	 * PMULL_MIN_LEN + 15 guarantees at least PMULL_MIN_LEN bytes
	 * remain after peeling off up to 15 alignment bytes.
	 */
	if (len < PMULL_MIN_LEN + 15 ||
	    !static_branch_likely(&have_pmull) || !crypto_simd_usable())
		return crc32c_le_scalar(crc, p, len);

	/* Peel off the unaligned head so p is 16-byte aligned. */
	head = -(uintptr_t)p & 15;
	if (head) {
		crc = crc32c_le_scalar(crc, p, head);
		p += head;
		len -= head;
	}

	body = round_down(len, 16);
	kernel_neon_begin();
	crc = crc32c_pmull_le(p, body, crc);
	kernel_neon_end();

	/* Finish the < 16 byte tail with the scalar routine. */
	return crc32c_le_scalar(crc, p + body, len - body);
}
EXPORT_SYMBOL(crc32c_le_arch);
91
/*
 * No accelerated big-endian CRC32 implementation is provided here;
 * always defer to the generic table-driven code.
 */
u32 crc32_be_arch(u32 crc, const u8 *p, size_t len)
{
	return crc32_be_base(crc, p, len);
}
EXPORT_SYMBOL(crc32_be_arch);
97
crc32_arm_init(void)98 static int __init crc32_arm_init(void)
99 {
100 if (elf_hwcap2 & HWCAP2_CRC32)
101 static_branch_enable(&have_crc32);
102 if (elf_hwcap2 & HWCAP2_PMULL)
103 static_branch_enable(&have_pmull);
104 return 0;
105 }
106 arch_initcall(crc32_arm_init);
107
/*
 * Nothing to tear down on module unload: the static keys simply stop
 * being used once the module's exported symbols go away.
 */
static void __exit crc32_arm_exit(void)
{
}
module_exit(crc32_arm_exit);
112
crc32_optimizations(void)113 u32 crc32_optimizations(void)
114 {
115 if (elf_hwcap2 & (HWCAP2_CRC32 | HWCAP2_PMULL))
116 return CRC32_LE_OPTIMIZATION | CRC32C_OPTIMIZATION;
117 return 0;
118 }
119 EXPORT_SYMBOL(crc32_optimizations);
120
121 MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
122 MODULE_DESCRIPTION("Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions");
123 MODULE_LICENSE("GPL v2");
124