xref: /linux/arch/arm/include/asm/word-at-a-time.h (revision 79790b6818e96c58fe2bffee1b418c16e64e7b80)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_ARM_WORD_AT_A_TIME_H
#define __ASM_ARM_WORD_AT_A_TIME_H

#ifndef __ARMEB__

/*
 * Little-endian word-at-a-time zero byte handling.
 * Heavily based on the x86 algorithm.
 */
#include <linux/bitops.h>
#include <linux/wordpart.h>

/*
 * Constant bit patterns for the zero-byte search: one_bits repeats 0x01
 * in every byte, high_bits repeats 0x80 (see the initializer below).
 */
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

has_zero(unsigned long a,unsigned long * bits,const struct word_at_a_time * c)20 static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
21 				     const struct word_at_a_time *c)
22 {
23 	unsigned long mask = ((a - c->one_bits) & ~a) & c->high_bits;
24 	*bits = mask;
25 	return mask;
26 }
27 
/* On little-endian the marker word from has_zero() needs no preparation. */
#define prep_zero_mask(a, bits, c) (bits)

/*
 * Turn the 0x80-per-zero-byte marker word into a byte mask whose set
 * bytes are exactly the bytes that precede the first (lowest-addressed)
 * zero byte: (bits - 1) & ~bits isolates all bits below the lowest set
 * marker, and the shift by 7 drops the partial low byte.
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	unsigned long below_first_mark = ~bits & (bits - 1);

	return below_first_mark >> 7;
}

/*
 * Convert the mask produced by create_zero_mask() into the byte index
 * of the first zero byte (0 when the zero byte is the very first one).
 */
static inline unsigned long find_zero(unsigned long mask)
{
#if __LINUX_ARM_ARCH__ >= 5
	/* We have clz: fls() gives the bit count, /8 gives the byte index. */
	return fls(mask) >> 3;
#else
	/*
	 * No clz.  Map the 32-bit masks (00000000, 000000ff, 0000ffff,
	 * 00ffffff) arithmetically onto (1, 1, 2, 3) via add-and-shift,
	 * then AND with the mask to fix the all-zeroes case down to 0.
	 */
	unsigned long byte_idx = (0x0ff0001 + mask) >> 23;

	return byte_idx & mask;
#endif
}

/* create_zero_mask() already covers the bytes before the first zero byte. */
#define zero_bytemask(mask) (mask)

#else	/* __ARMEB__ */
#include <asm-generic/word-at-a-time.h>
#endif

#ifdef CONFIG_DCACHE_WORD_ACCESS

/*
 * Load an unaligned word from kernel space.
 *
 * In the (very unlikely) case of the word being a page-crosser
 * and the next page not being mapped, take the exception and
 * return zeroes in the non-existing part.
 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, offset;

	/* Load word from unaligned pointer addr */
	asm(
	"1:	ldr	%0, [%2]\n"
	"2:\n"
	/*
	 * Fault fixup, run only if the load at 1: faults: redo the load
	 * from the word-aligned address containing addr (an aligned load
	 * cannot cross a page), then shift away the bytes that precede
	 * addr so the unmapped part reads back as zeroes.
	 */
	"	.pushsection .text.fixup,\"ax\"\n"
	"	.align 2\n"
	"3:	and	%1, %2, #0x3\n"		/* offset = addr & 3 */
	"	bic	%2, %2, #0x3\n"		/* align addr down to word */
	"	ldr	%0, [%2]\n"
	"	lsl	%1, %1, #0x3\n"		/* byte offset -> bit offset */
#ifndef __ARMEB__
	"	lsr	%0, %0, %1\n"		/* LE: drop the low (earlier) bytes */
#else
	"	lsl	%0, %0, %1\n"		/* BE: drop the high (earlier) bytes */
#endif
	"	b	2b\n"
	"	.popsection\n"
	/* Exception-table entry: a fault at 1b is handled at 3b. */
	"	.pushsection __ex_table,\"a\"\n"
	"	.align	3\n"
	"	.long	1b, 3b\n"
	"	.popsection"
	: "=&r" (ret), "=&r" (offset)
	: "r" (addr), "Qo" (*(unsigned long *)addr));

	return ret;
}

#endif	/* DCACHE_WORD_ACCESS */
#endif /* __ASM_ARM_WORD_AT_A_TIME_H */
101