// SPDX-License-Identifier: GPL-2.0
/*
 * crc32-mips.c - CRC32 and CRC32C using optional MIPSr6 instructions
 *
 * Module based on arm64/crypto/crc32-arm.c
 *
 * Copyright (C) 2014 Linaro Ltd <yazen.ghannam@linaro.org>
 * Copyright (C) 2018 MIPS Tech, LLC
 */

#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/mipsregs.h>
#include <linux/unaligned.h>

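/*
 * Operand sizes (byte/half/word/double) and CRC variants (CRC32 vs. CRC32C)
 * used as tokens by the CRC32()/CRC32C() helpers below.  The enumerators
 * also match the SZ and TYPE values passed into the instruction encodings
 * further down in this file.
 */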
enum crc_op_size {
	b, h, w, d,
};

enum crc_type {
	crc32,
	crc32c,
};

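/*
 * If the assembler does not support the CRC extension, define each CRC
 * mnemonic as an assembler macro that checks its operands and emits the raw
 * instruction word (classic MIPS and microMIPS encodings).  Otherwise simply
 * switch the extension on with ".set crc" around the instruction.
 */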
#ifndef TOOLCHAIN_SUPPORTS_CRC
#define _ASM_SET_CRC(OP, SZ, TYPE) \
_ASM_MACRO_3R(OP, rt, rs, rt2, \
	".ifnc \\rt, \\rt2\n\t" \
	".error \"invalid operands \\\"" #OP " \\rt,\\rs,\\rt2\\\"\"\n\t" \
	".endif\n\t" \
	_ASM_INSN_IF_MIPS(0x7c00000f | (__rt << 16) | (__rs << 21) | \
			  ((SZ) << 6) | ((TYPE) << 8)) \
	_ASM_INSN32_IF_MM(0x00000030 | (__rs << 16) | (__rt << 21) | \
			  ((SZ) << 14) | ((TYPE) << 3)))
#define _ASM_UNSET_CRC(op, SZ, TYPE) ".purgem " #op "\n\t"
#else /* TOOLCHAIN_SUPPORTS_CRC */
#define _ASM_SET_CRC(op, SZ, TYPE) ".set\tcrc\n\t"
#define _ASM_UNSET_CRC(op, SZ, TYPE)
#endif

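/*
 * Fold @value into @crc with a single CRC instruction: the asm reads and
 * writes the accumulator through %0 ("+r") and takes the new data in %1.
 */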
#define __CRC32(crc, value, op, SZ, TYPE) \
do { \
	__asm__ __volatile__( \
		".set push\n\t" \
		_ASM_SET_CRC(op, SZ, TYPE) \
		#op " %0, %1, %0\n\t" \
		_ASM_UNSET_CRC(op, SZ, TYPE) \
		".set pop" \
		: "+r" (crc) \
		: "r" (value)); \
} while (0)

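/*
 * One wrapper per instruction; CRC32() and CRC32C() pick the right one by
 * pasting the type and size tokens together, e.g. CRC32(crc, val, w) expands
 * to the crc32w instruction.
 */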
#define _CRC32_crc32b(crc, value) __CRC32(crc, value, crc32b, 0, 0)
#define _CRC32_crc32h(crc, value) __CRC32(crc, value, crc32h, 1, 0)
#define _CRC32_crc32w(crc, value) __CRC32(crc, value, crc32w, 2, 0)
#define _CRC32_crc32d(crc, value) __CRC32(crc, value, crc32d, 3, 0)
#define _CRC32_crc32cb(crc, value) __CRC32(crc, value, crc32cb, 0, 1)
#define _CRC32_crc32ch(crc, value) __CRC32(crc, value, crc32ch, 1, 1)
#define _CRC32_crc32cw(crc, value) __CRC32(crc, value, crc32cw, 2, 1)
#define _CRC32_crc32cd(crc, value) __CRC32(crc, value, crc32cd, 3, 1)

#define _CRC32(crc, value, size, op) \
	_CRC32_##op##size(crc, value)

#define CRC32(crc, value, size) \
	_CRC32(crc, value, size, crc32)

#define CRC32C(crc, value, size) \
	_CRC32(crc, value, size, crc32c)

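/*
 * Enabled at boot (see crc32_mips_init() below) when the CPU implements the
 * CRC32 instructions; otherwise every call falls back to the generic code.
 */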
static DEFINE_STATIC_KEY_FALSE(have_crc32);

u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
{
	if (!static_branch_likely(&have_crc32))
		return crc32_le_base(crc, p, len);

	if (IS_ENABLED(CONFIG_64BIT)) {
		for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
			u64 value = get_unaligned_le64(p);

			CRC32(crc, value, d);
		}

		if (len & sizeof(u32)) {
			u32 value = get_unaligned_le32(p);

			CRC32(crc, value, w);
			p += sizeof(u32);
		}
	} else {
		for (; len >= sizeof(u32); len -= sizeof(u32)) {
			u32 value = get_unaligned_le32(p);

			CRC32(crc, value, w);
			p += sizeof(u32);
		}
	}

	if (len & sizeof(u16)) {
		u16 value = get_unaligned_le16(p);

		CRC32(crc, value, h);
		p += sizeof(u16);
	}

	if (len & sizeof(u8)) {
		u8 value = *p++;

		CRC32(crc, value, b);
	}

	return crc;
}
EXPORT_SYMBOL(crc32_le_arch);

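/* Same structure as crc32_le_arch(), using the CRC32C (Castagnoli) ops. */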
u32 crc32c_le_arch(u32 crc, const u8 *p, size_t len)
{
	if (!static_branch_likely(&have_crc32))
		return crc32c_le_base(crc, p, len);

	if (IS_ENABLED(CONFIG_64BIT)) {
		for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
			u64 value = get_unaligned_le64(p);

			CRC32C(crc, value, d);
		}

		if (len & sizeof(u32)) {
			u32 value = get_unaligned_le32(p);

			CRC32C(crc, value, w);
			p += sizeof(u32);
		}
	} else {
		for (; len >= sizeof(u32); len -= sizeof(u32)) {
			u32 value = get_unaligned_le32(p);

			CRC32C(crc, value, w);
			p += sizeof(u32);
		}
	}

	if (len & sizeof(u16)) {
		u16 value = get_unaligned_le16(p);

		CRC32C(crc, value, h);
		p += sizeof(u16);
	}

	if (len & sizeof(u8)) {
		u8 value = *p++;

		CRC32C(crc, value, b);
	}
	return crc;
}
EXPORT_SYMBOL(crc32c_le_arch);

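/* No accelerated path for the big-endian CRC; defer to the generic code. */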
u32 crc32_be_arch(u32 crc, const u8 *p, size_t len)
{
	return crc32_be_base(crc, p, len);
}
EXPORT_SYMBOL(crc32_be_arch);

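/* Enable the accelerated paths if the CPU advertises the CRC32 feature. */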
static int __init crc32_mips_init(void)
{
	if (cpu_have_feature(cpu_feature(MIPS_CRC32)))
		static_branch_enable(&have_crc32);
	return 0;
}
arch_initcall(crc32_mips_init);

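/* Nothing to tear down; the empty handler keeps the module unloadable. */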
static void __exit crc32_mips_exit(void)
{
}
module_exit(crc32_mips_exit);

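/* Let callers query which CRC variants have an accelerated implementation. */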
u32 crc32_optimizations(void)
{
	if (static_key_enabled(&have_crc32))
		return CRC32_LE_OPTIMIZATION | CRC32C_OPTIMIZATION;
	return 0;
}
EXPORT_SYMBOL(crc32_optimizations);

MODULE_AUTHOR("Marcin Nowakowski <marcin.nowakowski@mips.com>");
MODULE_DESCRIPTION("CRC32 and CRC32C using optional MIPS instructions");
MODULE_LICENSE("GPL v2");