// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
 *
 * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/cpufeature.h>

#include <asm/hwcap.h>
#include <asm/simd.h>

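/*
 * Run-time detected CPU features: the ARMv8 CRC32 instructions and the
 * Crypto Extensions 64-bit carry-less multiply (PMULL), reported via ELF
 * hwcaps and latched in crc32_mod_init_arch() below.
 */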
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_crc32);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_pmull);

#define PMULL_MIN_LEN	64	/* min size of buffer for pmull functions */

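/*
 * Assembly primitives: the *_armv8_le() routines use the ARMv8 CRC32
 * instructions, the *_pmull_le() routines use carry-less multiplication.
 * The PMULL routines are only called from here on 16-byte-aligned buffers
 * whose length is a multiple of 16 and at least PMULL_MIN_LEN bytes.
 */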
asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);

asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);

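/*
 * CRC-32 without SIMD: use the ARMv8 CRC32 instructions when the CPU has
 * them, otherwise fall back to the generic table-based implementation.
 */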
static inline u32 crc32_le_scalar(u32 crc, const u8 *p, size_t len)
{
	if (static_branch_likely(&have_crc32))
		return crc32_armv8_le(crc, p, len);
	return crc32_le_base(crc, p, len);
}

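/*
 * CRC-32 with PMULL acceleration for large buffers.  The length check
 * allows for up to 15 bytes of alignment prologue, so at least
 * PMULL_MIN_LEN bytes remain for the PMULL path.  The buffer is aligned
 * to 16 bytes using the scalar helper, the 16-byte-multiple bulk is
 * processed with PMULL inside a kernel-mode SIMD (NEON) section, and any
 * remaining tail is handled by the scalar helper again.
 */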
static inline u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
{
	if (len >= PMULL_MIN_LEN + 15 &&
	    static_branch_likely(&have_pmull) && likely(may_use_simd())) {
		size_t n = -(uintptr_t)p & 15;

		/* align p to 16-byte boundary */
		if (n) {
			crc = crc32_le_scalar(crc, p, n);
			p += n;
			len -= n;
		}
		n = round_down(len, 16);
		scoped_ksimd()
			crc = crc32_pmull_le(p, n, crc);
		p += n;
		len -= n;
	}
	return crc32_le_scalar(crc, p, len);
}

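/*
 * CRC-32C (Castagnoli) without SIMD, mirroring crc32_le_scalar().
 */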
static inline u32 crc32c_scalar(u32 crc, const u8 *p, size_t len)
{
	if (static_branch_likely(&have_crc32))
		return crc32c_armv8_le(crc, p, len);
	return crc32c_base(crc, p, len);
}

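/*
 * CRC-32C (Castagnoli) with PMULL acceleration, mirroring crc32_le_arch().
 */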
static inline u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
{
	if (len >= PMULL_MIN_LEN + 15 &&
	    static_branch_likely(&have_pmull) && likely(may_use_simd())) {
		size_t n = -(uintptr_t)p & 15;

		/* align p to 16-byte boundary */
		if (n) {
			crc = crc32c_scalar(crc, p, n);
			p += n;
			len -= n;
		}
		n = round_down(len, 16);
		scoped_ksimd()
			crc = crc32c_pmull_le(p, n, crc);
		p += n;
		len -= n;
	}
	return crc32c_scalar(crc, p, len);
}

#define crc32_be_arch crc32_be_base /* not implemented on this arch */

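/*
 * Flip the static keys according to the CPU's ELF hwcaps.  The
 * self-referencing #define lets the generic CRC code detect that this
 * architecture provides an init hook.
 */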
#define crc32_mod_init_arch crc32_mod_init_arch
static void crc32_mod_init_arch(void)
{
	if (elf_hwcap2 & HWCAP2_CRC32)
		static_branch_enable(&have_crc32);
	if (elf_hwcap2 & HWCAP2_PMULL)
		static_branch_enable(&have_pmull);
}

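/*
 * Report which CRC-32 variants have an accelerated implementation on this
 * CPU; either hwcap bit speeds up both the LE and the Castagnoli variant.
 */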
static inline u32 crc32_optimizations_arch(void)
{
	if (elf_hwcap2 & (HWCAP2_CRC32 | HWCAP2_PMULL))
		return CRC32_LE_OPTIMIZATION | CRC32C_OPTIMIZATION;
	return 0;
}