xref: /linux/arch/mips/include/asm/local.h (revision a06c3fad49a50d5d5eb078f93e70f4d3eca5d5a5)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/asm.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

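/*
 * Usage sketch (illustrative only, not part of this header): local_t is
 * intended for per-CPU counters that are only ever written by their owning
 * CPU, along the lines of Documentation/core-api/local_ops.rst.  The name
 * "hits" below is a placeholder for this example:
 *
 *	static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *	// fast path, on the owning CPU (e.g. in irq context or with
 *	// preemption disabled):
 *	local_inc(this_cpu_ptr(&hits));
 *
 *	// slow path, any CPU may read the counters:
 *	total += local_read(&per_cpu(hits, cpu));
 */
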
/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "			\n"
		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
			__stringify(LONG_SC)	"	%0, %2		\n"
			__stringify(SC_BEQZ)	"	%0, 1b		\n"
			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
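
/*
 * Rough C equivalent of the LL/SC sequence above (sketch only,
 * store_conditional() is not a real function):
 *
 *	do {
 *		temp = l->a.counter;		// LONG_LL: load-linked
 *		result = temp + i;		// LONG_ADDU
 *	} while (!store_conditional(&l->a.counter, result));	// LONG_SC + SC_BEQZ
 *	result = temp + i;			// final LONG_ADDU: SC overwrote %0
 *						// with its success flag, so the sum
 *						// is recomputed for the return value
 *
 * __SYNC(full, loongson3_war) emits a full barrier only on configurations
 * that need the Loongson-3 LL/SC errata workaround.
 */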

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "			\n"
		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
			__stringify(LONG_SC)	"	%0, %2		\n"
			__stringify(SC_BEQZ)	"	%0, 1b		\n"
			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_cmpxchg(local_t *l, long old, long new)
{
	return cmpxchg_local(&l->a.counter, old, new);
}

static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
{
	return try_cmpxchg_local(&l->a.counter,
				 (typeof(l->a.counter) *) old, new);
}

#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
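
/*
 * local_try_cmpxchg() follows the usual try_cmpxchg() convention: it returns
 * true on success, and on failure updates *old with the value actually found,
 * so a retry loop does not need to re-read the counter.  Illustrative sketch
 * (compute_new_value() is a placeholder; local_add_unless() below is a real
 * user of this pattern):
 *
 *	long old = local_read(l);
 *
 *	do {
 *		new = compute_new_value(old);
 *	} while (!local_try_cmpxchg(l, &old, new));
 */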

/**
 * local_add_unless - add unless the number is already a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, if @l was not already @u.
 * Returns true if the addition was done.
 */
static __inline__ bool
local_add_unless(local_t *l, long a, long u)
{
	long c = local_read(l);

	do {
		if (unlikely(c == u))
			return false;
	} while (!local_try_cmpxchg(l, &c, c + a));

	return true;
}

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
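
/*
 * local_inc_not_zero() is the usual building block for "take a reference
 * unless the counter has already dropped to zero" schemes.  Hypothetical
 * sketch, where obj->refs is assumed to be a local_t:
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;	// counter was 0, object is going away
 */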

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
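
/*
 * Typical use of the *_and_test variants is release-style accounting on a
 * per-CPU counter.  Hypothetical sketch (st->pending and wake_up_flusher()
 * are placeholder names for this example):
 *
 *	local_add(nbytes, &st->pending);		// producer side
 *	...
 *	if (local_sub_and_test(nbytes, &st->pending))	// consumer side
 *		wake_up_flusher(st);			// pending hit zero
 */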

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter+=(i))
#define __local_sub(i, l)	((l)->a.counter-=(i))
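
/*
 * Unlike the local_*() operations above, the __local_*() variants are plain,
 * non-atomic read-modify-writes.  They are only safe when nothing else on
 * this CPU (in particular no interrupt handler) can touch the same counter,
 * e.g. with interrupts disabled.  Hypothetical sketch ("my_stat" is a
 * placeholder per-CPU local_t):
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_add(delta, this_cpu_ptr(&my_stat));
 *	local_irq_restore(flags);
 */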

#endif /* _ARCH_MIPS_LOCAL_H */