/* arch/sh/include/asm/bitops-llsc.h */
#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H

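/*
 * Atomic bitops built on the SH-4A LL/SC pair: movli.l loads a word
 * and opens a reservation, movco.l writes it back only if the
 * reservation is still intact and records success in the T bit, and
 * "bf 1b" (branch if T is false) retries the whole sequence when the
 * conditional store failed.  Both instructions operate on r0 only,
 * which is what the "z" register constraint pins the temporary to.
 */

/* Atomically set bit @nr of the bitmap at @addr. */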
static inline void set_bit(int nr, volatile void *addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! set_bit		\n\t"
		"or		%2, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

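/* Atomically clear bit @nr of the bitmap at @addr. */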
static inline void clear_bit(int nr, volatile void *addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! clear_bit		\n\t"
		"and		%2, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (~mask)
		: "t", "memory"
	);
}

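/* Atomically toggle bit @nr of the bitmap at @addr. */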
static inline void change_bit(int nr, volatile void *addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%1, %0	! change_bit		\n\t"
		"xor		%2, %0				\n\t"
		"movco.l	%0, @%1				\n\t"
		"bf		1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

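/*
 * Atomically set bit @nr and return its old (nonzero/zero) value.
 * Like the other test_and_*() bitops, this implies a full memory
 * barrier, provided by the trailing synco.
 */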
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! test_and_set_bit	\n\t"
		"mov		%0, %1				\n\t"
		"or		%3, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

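/* Atomically clear bit @nr and return its old value. */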
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! test_and_clear_bit	\n\t"
		"mov		%0, %1				\n\t"
		"and		%4, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask), "r" (~mask)
		: "t", "memory"
	);

	return retval != 0;
}

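/* Atomically toggle bit @nr and return its old value. */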
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! test_and_change_bit	\n\t"
		"mov		%0, %1				\n\t"
		"xor		%3, %0				\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"and		%3, %1				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

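/*
 * The non-atomic __set_bit() and friends come from the generic
 * implementation pulled in below.
 *
 * Illustrative usage (the names here are hypothetical, not part of
 * this header):
 *
 *	static unsigned long initialized;
 *
 *	if (!test_and_set_bit(0, &initialized))
 *		do_one_time_setup();
 */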
#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_LLSC_H */