/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_ATOMIC_H_

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 * Implementation of atomic bitops using atomic-fetch ops.
 * See Documentation/atomic_bitops.txt for details.
 */

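/**
 * arch_set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @p: the address to start counting from
 *
 * Selects the word holding bit @nr with BIT_WORD(nr) (nr / BITS_PER_LONG)
 * and ORs in BIT_MASK(nr) (1UL << (nr % BITS_PER_LONG)). Atomic, but
 * unordered: no memory barriers are implied.
 */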
static __always_inline void
arch_set_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	raw_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
}

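/**
 * arch_clear_bit - Atomically clear a bit in memory
 * @nr: the bit to clear
 * @p: the address to start counting from
 *
 * Clears bit @nr by atomically ANDing the containing word with
 * ~BIT_MASK(nr). Like arch_set_bit(), this implies no memory barriers.
 */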
static __always_inline void
arch_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	raw_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
}

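/**
 * arch_change_bit - Atomically toggle a bit in memory
 * @nr: the bit to toggle
 * @p: the address to start counting from
 *
 * Toggles bit @nr by atomically XORing the containing word with
 * BIT_MASK(nr). No memory barriers are implied.
 */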
static __always_inline void
arch_change_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	raw_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
}

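/**
 * arch_test_and_set_bit - Atomically set a bit and return its old value
 * @nr: the bit to set
 * @p: the address to start counting from
 *
 * The fetch_or returns the word's previous contents, so masking with
 * BIT_MASK(nr) recovers the old bit value. As a value-returning atomic
 * RMW operation, this is fully ordered.
 */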
static __always_inline int
arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = raw_atomic_long_fetch_or(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

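/**
 * arch_test_and_clear_bit - Atomically clear a bit and return its old value
 * @nr: the bit to clear
 * @p: the address to start counting from
 *
 * Uses fetch_andnot to clear the bit while returning the word's previous
 * contents, from which the old bit value is extracted. Fully ordered.
 */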
static __always_inline int
arch_test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = raw_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

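/**
 * arch_test_and_change_bit - Atomically toggle a bit and return its old value
 * @nr: the bit to toggle
 * @p: the address to start counting from
 *
 * Uses fetch_xor to flip the bit while returning the word's previous
 * contents, from which the old bit value is extracted. Fully ordered.
 */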
static __always_inline int
arch_test_and_change_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	old = raw_atomic_long_fetch_xor(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

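/*
 * Generate the instrumented (KASAN/KCSAN-aware) non-arch_ wrappers,
 * e.g. set_bit() and test_and_set_bit(), on top of the arch_ helpers
 * above.
 */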
#include <asm-generic/bitops/instrumented-atomic.h>

#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */