/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/asm.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

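/*
 * Usage sketch (illustrative, not part of this header; the per-CPU
 * variable `pkt_count` is hypothetical): local_t is meant for counters
 * that are only ever updated by their owning CPU, so updates must run
 * while pinned to that CPU.
 *
 *	DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);
 *
 *	local_inc(this_cpu_ptr(&pkt_count));
 *	pr_info("%ld\n", local_read(this_cpu_ptr(&pkt_count)));
 */
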
/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t *l)
{
	unsigned long result;

	if (kernel_uses_llsc) {
		unsigned long temp;

		/*
		 * LL/SC retry loop: if the store-conditional fails,
		 * branch back and reload.  SC clobbers %0 with its
		 * success flag, so the result is recomputed after
		 * the loop.
		 */
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "                   \n"
		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
			__stringify(LONG_SC)	"	%0, %2		\n"
			__stringify(SC_BEQZ)	"	%0, 1b		\n"
			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		/* No LL/SC: make the read-modify-write atomic by masking interrupts. */
		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
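
/*
 * Sketch (an assumption, not from this file; `seq` is a hypothetical
 * per-CPU local_t): because the updated value is returned atomically
 * with respect to interrupts on the local CPU, a sequence number can
 * be handed out without a cmpxchg loop.
 *
 *	long id = local_inc_return(this_cpu_ptr(&seq));
 */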

static __inline__ long local_sub_return(long i, local_t *l)
{
	unsigned long result;

	if (kernel_uses_llsc) {
		unsigned long temp;

		/* Same LL/SC retry loop as local_add_return(), using SUBU. */
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "                   \n"
		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
			__stringify(LONG_SC)	"	%0, %2		\n"
			__stringify(SC_BEQZ)	"	%0, 1b		\n"
			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_cmpxchg(local_t *l, long old, long new)
{
	return cmpxchg_local(&l->a.counter, old, new);
}

static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
{
	typeof(l->a.counter) *__old = (typeof(l->a.counter) *) old;

	/* On failure, try_cmpxchg_local() writes the observed value to *old. */
	return try_cmpxchg_local(&l->a.counter, __old, new);
}

#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))

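/*
 * Sketch of the usual try-cmpxchg retry idiom (illustrative; LIMIT is
 * a hypothetical bound): on failure local_try_cmpxchg() refreshes `old`
 * with the value it observed, so the loop never re-reads by hand.
 *
 *	long old = local_read(l), new;
 *
 *	do {
 *		new = old < LIMIT ? old + 1 : old;
 *	} while (!local_try_cmpxchg(l, &old, new));
 */
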
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

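/*
 * Illustrative use (assumption; `obj->ref` is a hypothetical local_t
 * reference count): local_inc_not_zero() takes a reference only while
 * the object is still live, and fails once the count has hit zero.
 *
 *	if (!local_inc_not_zero(&obj->ref))
 *		return NULL;	(object already on its way to being freed)
 */
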
#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
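
/*
 * Sketch of the classic release pattern (assumption; `obj` and
 * free_obj() are hypothetical): whichever caller drops the count to
 * zero frees the object.  local_sub_and_test() is the same idea for
 * steps larger than one.
 *
 *	if (local_dec_and_test(&obj->ref))
 *		free_obj(obj);
 */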

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
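
/*
 * Illustrative check (assumption; `budget`, `cost` and throttle() are
 * hypothetical): local_add_negative() folds the add and the sign test
 * into one atomic step, so overdraw is detected without a separate read.
 *
 *	if (local_add_negative(-cost, &budget))
 *		throttle();
 */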

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a local_t pointer, like the ops above, not a plain variable.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter += (i))
#define __local_sub(i, l)	((l)->a.counter -= (i))
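
/*
 * Contrast sketch (assumption; reuses the hypothetical `pkt_count` from
 * above): the __local_* forms compile to a plain read-modify-write, so
 * they are only safe where no interrupt can touch the same counter,
 * e.g. with interrupts already masked:
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_add(3, this_cpu_ptr(&pkt_count));
 *	local_irq_restore(flags);
 */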

#endif /* _ARCH_MIPS_LOCAL_H */