/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 Regents of the University of California
 */

#ifndef _ASM_RISCV_BITOPS_H
#define _ASM_RISCV_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error "Only <linux/bitops.h> can be included directly"
#endif /* _LINUX_BITOPS_H */

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/bitsperlong.h>

#if !defined(CONFIG_RISCV_ISA_ZBB) || defined(NO_ALTERNATIVE)
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>

#else
#include <asm/alternative-macros.h>
#include <asm/hwcap.h>

#if (BITS_PER_LONG == 64)
#define CTZW	"ctzw "
#define CLZW	"clzw "
#elif (BITS_PER_LONG == 32)
#define CTZW	"ctz "
#define CLZW	"clz "
#else
#error "Unexpected BITS_PER_LONG"
#endif

static __always_inline unsigned long variable__ffs(unsigned long word)
{
	int num;

	/*
	 * The jump is patched to a nop at boot when the CPU implements
	 * Zbb, so we fall through to the ctz path; otherwise we branch
	 * to the generic binary-search fallback below.
	 */
	asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
				      RISCV_ISA_EXT_ZBB, 1)
			  : : : : legacy);

	asm volatile (".option push\n"
		      ".option arch,+zbb\n"
		      "ctz %0, %1\n"
		      ".option pop\n"
		      : "=r" (word) : "r" (word) :);

	return word;

legacy:
	num = 0;
#if BITS_PER_LONG == 64
	if ((word & 0xffffffff) == 0) {
		num += 32;
		word >>= 32;
	}
#endif
	if ((word & 0xffff) == 0) {
		num += 16;
		word >>= 16;
	}
	if ((word & 0xff) == 0) {
		num += 8;
		word >>= 8;
	}
	if ((word & 0xf) == 0) {
		num += 4;
		word >>= 4;
	}
	if ((word & 0x3) == 0) {
		num += 2;
		word >>= 2;
	}
	if ((word & 0x1) == 0)
		num += 1;
	return num;
}

/**
 * __ffs - find first set bit in a long word
 * @word: The word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
#define __ffs(word)				\
	(__builtin_constant_p(word) ?		\
	 (unsigned long)__builtin_ctzl(word) :	\
	 variable__ffs(word))
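
/*
 * Illustrative sketch (not part of the original file): __ffs() returns a
 * 0-based bit index, which makes it convenient for iterating over the set
 * bits of a word. The helper name below is hypothetical.
 *
 *	static inline void example_walk_bits(unsigned long word)
 *	{
 *		while (word) {
 *			unsigned long bit = __ffs(word);	// 0-based index
 *
 *			// ... use @bit ...
 *			word &= word - 1;	// clear the lowest set bit
 *		}
 *	}
 */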

static __always_inline unsigned long variable__fls(unsigned long word)
{
	int num;

	asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
				      RISCV_ISA_EXT_ZBB, 1)
			  : : : : legacy);

	asm volatile (".option push\n"
		      ".option arch,+zbb\n"
		      "clz %0, %1\n"
		      ".option pop\n"
		      : "=r" (word) : "r" (word) :);

	return BITS_PER_LONG - 1 - word;

legacy:
	num = BITS_PER_LONG - 1;
#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG - 16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG - 8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG - 4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG - 2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG - 1))))
		num -= 1;
	return num;
}

/**
 * __fls - find last set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
#define __fls(word)							\
	(__builtin_constant_p(word) ?					\
	 (unsigned long)(BITS_PER_LONG - 1 - __builtin_clzl(word)) :	\
	 variable__fls(word))
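
/*
 * Illustrative sketch (not part of the original file): __fls() gives the
 * 0-based index of the most significant set bit, so it can be used to
 * round a nonzero value down to a power of two. The helper name is
 * hypothetical.
 *
 *	static inline unsigned long example_rounddown_pow2(unsigned long x)
 *	{
 *		return 1UL << __fls(x);		// undefined for x == 0
 *	}
 */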

static __always_inline int variable_ffs(int x)
{
	int r;

	if (!x)
		return 0;

	asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
				      RISCV_ISA_EXT_ZBB, 1)
			  : : : : legacy);

	asm volatile (".option push\n"
		      ".option arch,+zbb\n"
		      CTZW "%0, %1\n"
		      ".option pop\n"
		      : "=r" (r) : "r" (x) :);

	return r + 1;

legacy:
	r = 1;
	if (!(x & 0xffff)) {
		x >>= 16;
		r += 16;
	}
	if (!(x & 0xff)) {
		x >>= 8;
		r += 8;
	}
	if (!(x & 0xf)) {
		x >>= 4;
		r += 4;
	}
	if (!(x & 3)) {
		x >>= 2;
		r += 2;
	}
	if (!(x & 1)) {
		x >>= 1;
		r += 1;
	}
	return r;
}

/**
 * ffs - find first set bit in a word
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs routines.
 *
 * ffs(value) returns 0 if value is 0 or the position of the first set bit if
 * value is nonzero. The first (least significant) bit is at position 1.
 */
#define ffs(x) (__builtin_constant_p(x) ? __builtin_ffs(x) : variable_ffs(x))
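
/*
 * Illustrative sketch (not part of the original file): unlike __ffs(),
 * ffs() is 1-based and safe on zero, so no pre-check is needed:
 *
 *	ffs(0)		== 0
 *	ffs(0x08)	== 4	// bit 3 set, reported as position 4
 *	__ffs(0x08)	== 3	// 0-based index of the same bit
 */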

static __always_inline int variable_fls(unsigned int x)
{
	int r;

	if (!x)
		return 0;

	asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
				      RISCV_ISA_EXT_ZBB, 1)
			  : : : : legacy);

	asm volatile (".option push\n"
		      ".option arch,+zbb\n"
		      CLZW "%0, %1\n"
		      ".option pop\n"
		      : "=r" (r) : "r" (x) :);

	return 32 - r;

legacy:
	r = 32;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

/**
 * fls - find last set bit in a word
 * @x: the word to search
 *
 * This is defined in a similar way to ffs, but returns the position of the
 * most significant set bit.
 *
 * fls(value) returns 0 if value is 0 or the position of the last set bit if
 * value is nonzero. The last (most significant) bit is at position 32.
 */
#define fls(x)							\
({								\
	typeof(x) x_ = (x);					\
	__builtin_constant_p(x_) ?				\
	 (int)((x_ != 0) ? (32 - __builtin_clz(x_)) : 0)	\
	 :							\
	 variable_fls(x_);					\
})
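
/*
 * Illustrative sketch (not part of the original file): fls() is commonly
 * used to compute the order (log2, rounded up) of a size:
 *
 *	fls(0)		== 0
 *	fls(1)		== 1	// bit 0 set, reported as position 1
 *	fls(0x90)	== 8	// highest set bit is bit 7
 *
 *	// order such that (1U << order) >= n, for n > 1 (hypothetical helper)
 *	static inline int example_order(unsigned int n)
 *	{
 *		return fls(n - 1);
 *	}
 */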

#endif /* !defined(CONFIG_RISCV_ISA_ZBB) || defined(NO_ALTERNATIVE) */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>

#include <asm-generic/bitops/const_hweight.h>

#if (BITS_PER_LONG == 64)
#define __AMO(op)	"amo" #op ".d"
#elif (BITS_PER_LONG == 32)
#define __AMO(op)	"amo" #op ".w"
#else
#error "Unexpected BITS_PER_LONG"
#endif

#define __test_and_op_bit_ord(op, mod, nr, addr, ord)		\
({								\
	unsigned long __res, __mask;				\
	__mask = BIT_MASK(nr);					\
	__asm__ __volatile__ (					\
		__AMO(op) #ord " %0, %2, %1"			\
		: "=r" (__res), "+A" (addr[BIT_WORD(nr)])	\
		: "r" (mod(__mask))				\
		: "memory");					\
	((__res & __mask) != 0);				\
})

#define __op_bit_ord(op, mod, nr, addr, ord)			\
	__asm__ __volatile__ (					\
		__AMO(op) #ord " zero, %1, %0"			\
		: "+A" (addr[BIT_WORD(nr)])			\
		: "r" (mod(BIT_MASK(nr)))			\
		: "memory");

#define __test_and_op_bit(op, mod, nr, addr)			\
	__test_and_op_bit_ord(op, mod, nr, addr, .aqrl)
#define __op_bit(op, mod, nr, addr)				\
	__op_bit_ord(op, mod, nr, addr, )
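
/*
 * Illustrative sketch (not part of the original file): on a 64-bit
 * kernel, __test_and_op_bit(or, __NOP, nr, addr) expands to roughly
 *
 *	__mask = BIT_MASK(nr);			// 1UL << (nr % 64)
 *	amoor.d.aqrl __res, __mask, (addr + BIT_WORD(nr) * 8)
 *	return (__res & __mask) != 0;
 *
 * i.e. one fully-ordered atomic memory operation that ORs the mask into
 * the containing word and returns that word's previous value, from which
 * the bit's old state is extracted. The plain __op_bit() form writes the
 * AMO result to the zero register, discarding the old value.
 */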

/* Bitmask modifiers */
#define __NOP(x)	(x)
#define __NOT(x)	(~(x))

/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation may be reordered on architectures other than x86.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	return __test_and_op_bit(or, __NOP, nr, addr);
}

/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation can be reordered on architectures other than x86.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	return __test_and_op_bit(and, __NOT, nr, addr);
}

/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	return __test_and_op_bit(xor, __NOP, nr, addr);
}
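
/*
 * Illustrative sketch (not part of the original file): the test_and_*
 * variants are what make a bitmap usable as a set of one-shot flags
 * shared between CPUs. The names below are hypothetical.
 *
 *	static unsigned long example_flags;
 *	#define EXAMPLE_INITIALIZED	0
 *
 *	static void example_init_once(void)
 *	{
 *		// Exactly one caller observes the old value as 0.
 *		if (test_and_set_bit(EXAMPLE_INITIALIZED, &example_flags))
 *			return;	// someone else already set the bit
 *
 *		// ... one-time setup runs on exactly one CPU ...
 *	}
 */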

/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non-x86 architectures, so if you are writing portable code,
 * make sure not to rely on its reordering guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
	__op_bit(or, __NOP, nr, addr);
}

/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non-x86 architectures, so if you are writing portable code,
 * make sure not to rely on its reordering guarantees.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	__op_bit(and, __NOT, nr, addr);
}

/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() may be reordered on architectures other than x86.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
	__op_bit(xor, __NOP, nr, addr);
}
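
/*
 * Illustrative sketch (not part of the original file): because @nr may
 * index past the first word, these helpers operate directly on multi-word
 * bitmaps declared with DECLARE_BITMAP(). The bitmap below is hypothetical.
 *
 *	static DECLARE_BITMAP(example_map, 256);
 *
 *	set_bit(200, example_map);	// lands in word 3 on a 64-bit kernel
 *	clear_bit(200, example_map);
 *	change_bit(7, example_map);	// toggles bit 7 of word 0
 */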

/**
 * test_and_set_bit_lock - Set a bit and return its old value, for lock
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and provides acquire barrier semantics.
 * It can be used to implement bit locks.
 */
static inline int test_and_set_bit_lock(
	unsigned long nr, volatile unsigned long *addr)
{
	return __test_and_op_bit_ord(or, __NOP, nr, addr, .aq);
}

/**
 * clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * This operation is atomic and provides release barrier semantics.
 */
static inline void clear_bit_unlock(
	unsigned long nr, volatile unsigned long *addr)
{
	__op_bit_ord(and, __NOT, nr, addr, .rl);
}

/**
 * __clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * This operation is like clear_bit_unlock(); however, it is not atomic.
 * It does provide release barrier semantics, so it can be used to unlock
 * a bit lock, but it should only be used if no other CPU can modify
 * any bits in the memory until the lock is released (a good example is
 * if the bit lock itself protects access to the other bits in the word).
 *
 * On RISC-V systems there seems to be no benefit to taking advantage of the
 * non-atomic property here: it's a lot more instructions and we still have to
 * provide release semantics anyway.
 */
static inline void __clear_bit_unlock(
	unsigned long nr, volatile unsigned long *addr)
{
	clear_bit_unlock(nr, addr);
}
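
/*
 * Illustrative sketch (not part of the original file): together,
 * test_and_set_bit_lock() and clear_bit_unlock() form a simple spinning
 * bit lock; acquire semantics on the set and release semantics on the
 * clear keep the critical section from leaking past either end. The
 * names below are hypothetical.
 *
 *	#define EXAMPLE_LOCK_BIT	0
 *	static unsigned long example_lock_word;
 *
 *	static void example_lock(void)
 *	{
 *		while (test_and_set_bit_lock(EXAMPLE_LOCK_BIT,
 *					     &example_lock_word))
 *			cpu_relax();	// spin until the old value was 0
 *	}
 *
 *	static void example_unlock(void)
 *	{
 *		clear_bit_unlock(EXAMPLE_LOCK_BIT, &example_lock_word);
 *	}
 */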

/*
 * Atomically XOR @mask into *@addr with release semantics and report
 * whether bit 7 (the sign bit of the low byte) was set in the old value.
 */
static inline bool xor_unlock_is_negative_byte(unsigned long mask,
		volatile unsigned long *addr)
{
	unsigned long res;
	__asm__ __volatile__ (
		__AMO(xor) ".rl %0, %2, %1"
		: "=r" (res), "+A" (*addr)
		: "r" (__NOP(mask))
		: "memory");
	return (res & BIT(7)) != 0;
}
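
/*
 * Illustrative sketch (not part of the original file): the point of this
 * helper is to clear a lock bit and test a waiter bit in one atomic
 * operation. The flag names below are hypothetical:
 *
 *	#define EX_LOCKED	BIT(0)
 *	#define EX_WAITERS	BIT(7)
 *
 *	// Drop the lock; wake sleepers only if someone is waiting.
 *	if (xor_unlock_is_negative_byte(EX_LOCKED, &word))
 *		example_wake_waiters();
 *
 * As long as bit 7 itself is not part of @mask, its value is the same
 * before and after the XOR, so returning it from the old value is fine.
 */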

#undef __test_and_op_bit
#undef __op_bit
#undef __NOP
#undef __NOT
#undef __AMO

#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* _ASM_RISCV_BITOPS_H */