/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

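/*
 * Illustrative sketch only (the per-CPU variable and function below are
 * hypothetical, not part of this header): the intended use is a per-CPU
 * counter that is only ever updated by the CPU that owns it, e.g.
 *
 *	static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *	static void record_hit(void)
 *	{
 *		local_inc(&get_cpu_var(hits));
 *		put_cpu_var(hits);
 *	}
 *
 * get_cpu_var()/put_cpu_var() disable and re-enable preemption around
 * the update, so the task cannot migrate mid-operation.  Other CPUs may
 * observe such a counter with local_read(), accepting that the value
 * can be slightly stale.
 */
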
/*
 * Same as the arithmetic operations above, but return the resulting value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
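
/*
 * The LL/SC loops in local_add_return() and local_sub_return() above
 * behave roughly like the following C sketch, where sc() stands in for
 * the __SC store-conditional and is not a real function:
 *
 *	do {
 *		temp = l->a.counter;			(__LL)
 *		result = temp + i;			(addu/subu)
 *	} while (!sc(&l->a.counter, result));		(__SC + beqz/beqzl)
 *	result = temp + i;	(__SC consumed %0, so recompute it)
 *
 * The store-conditional fails, forcing a retry, if the location was
 * written between the load-linked and the store; on the affected
 * R10000 revisions (R10000_LLSC_WAR) the retry branch must be the
 * branch-likely beqzl.
 */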

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))

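/*
 * local_cmpxchg() can serve as a building block for other
 * read-modify-write operations.  A sketch (delta and LIMIT are
 * hypothetical) that adds with an upper clamp:
 *
 *	long old, new;
 *
 *	do {
 *		old = local_read(l);
 *		new = old + delta;
 *		if (new > LIMIT)
 *			new = LIMIT;
 *	} while (local_cmpxchg(l, old, new) != old);
 */
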
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

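/*
 * Typical use of local_inc_not_zero() (a sketch; obj and its refs
 * member are hypothetical): take a reference only while the object is
 * still live.
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;	(object already being torn down)
 */
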
#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

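/*
 * Typical use of local_dec_and_test() (a sketch; obj and free_obj()
 * are hypothetical): drop a reference and free the object on the
 * final put.
 *
 *	if (local_dec_and_test(&obj->refs))
 *		free_obj(obj);
 */
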
/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter+=(i))
#define __local_sub(i, l)	((l)->a.counter-=(i))

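/*
 * Sketch of safe use of the non-atomic variants (stats and n are
 * hypothetical): the caller must already exclude every other updater,
 * for example by disabling interrupts on the local CPU.
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_add(n, this_cpu_ptr(&stats));
 *	local_irq_restore(flags);
 */
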
#endif /* _ARCH_MIPS_LOCAL_H */