/* arch/x86/include/asm/atomic.h (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732) */
#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return ACCESS_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

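/*
 * Usage sketch (illustrative only, not part of the kernel source): a
 * hypothetical event counter.  Note that atomic_set() compiles to a plain
 * store; it is atomic only because aligned word-sized stores are atomic
 * on x86.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	void reset_events(void)
 *	{
 *		atomic_set(&nr_events, 0);
 *	}
 *
 *	int count_events(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 */
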
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

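/*
 * Usage sketch (hypothetical names): atomic_add()/atomic_sub() track a
 * quantity in flight.  Neither returns a value; the *_return and
 * *_and_test variants below serve when the result is needed.
 *
 *	static atomic_t pages_in_flight = ATOMIC_INIT(0);
 *
 *	void submit_io(int nr_pages)
 *	{
 *		atomic_add(nr_pages, &pages_in_flight);
 *	}
 *
 *	void complete_io(int nr_pages)
 *	{
 *		atomic_sub(nr_pages, &pages_in_flight);
 *	}
 */
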
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}

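/*
 * Usage sketch (hypothetical object and helper): dropping several
 * references at once and detecting the final put in a single atomic step:
 *
 *	if (atomic_sub_and_test(nr_refs, &obj->refcount))
 *		free_object(obj);
 */
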
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline int atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}

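/*
 * Usage sketch (hypothetical structure): the classic reference-count
 * pattern.  Only the thread whose decrement hits zero sees
 * atomic_dec_and_test() return true, so exactly one caller frees:
 *
 *	void get_object(struct object *obj)
 *	{
 *		atomic_inc(&obj->refcount);
 *	}
 *
 *	void put_object(struct object *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			free_object(obj);
 *	}
 */
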
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline int atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}

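/*
 * Usage sketch (illustrative; MAX_CREDITS is a made-up constant):
 * atomic_add_negative() with a negative @i can act as a credit counter,
 * where a negative result signals the budget was overdrawn and the caller
 * must back out:
 *
 *	static atomic_t credits = ATOMIC_INIT(MAX_CREDITS);
 *
 *	bool take_credit(void)
 *	{
 *		if (atomic_add_negative(-1, &credits)) {
 *			atomic_inc(&credits);
 *			return false;
 *		}
 *		return true;
 *	}
 */
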
/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}

/**
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

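/*
 * Usage sketch (illustrative): the *_return forms are for callers that
 * need the updated value, e.g. handing out unique increasing ids:
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int alloc_id(void)
 *	{
 *		return atomic_inc_return(&next_id);
 *	}
 */
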
/* Atomically set @v to @new if it equals @old; return the old value. */
static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

/* Atomically set @v to @new, returning the previous value. */
static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

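/*
 * Usage sketch (hypothetical helper): the standard atomic_cmpxchg() retry
 * loop.  The exchange succeeds only if nobody modified @v since it was
 * read; on failure, retry with the value cmpxchg handed back:
 *
 *	static int atomic_add_capped(atomic_t *v, int a, int cap)
 *	{
 *		int old, new, c = atomic_read(v);
 *
 *		for (;;) {
 *			new = c + a;
 *			if (new > cap)
 *				new = cap;
 *			old = atomic_cmpxchg(v, c, new);
 *			if (old == c)
 *				return new;
 *			c = old;
 *		}
 *	}
 */
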
/*
 * atomic_and(), atomic_or(), atomic_xor(): atomically apply the bitwise
 * operation of @i onto @v, with no return value.
 */
#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
			: "+m" (v->counter)				\
			: "ir" (i)					\
			: "memory");					\
}

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OP

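/*
 * Usage sketch (FLAG_DIRTY is a made-up flag): the bitwise ops update a
 * mask without a load/modify/store race against concurrent writers:
 *
 *	atomic_or(FLAG_DIRTY, &obj->flags);	sets the flag
 *	atomic_and(~FLAG_DIRTY, &obj->flags);	clears it again
 */
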
/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

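/*
 * Usage sketch: __atomic_add_unless() is the building block of
 * "increment unless zero" primitives, such as taking a reference only
 * while the object is still live (hypothetical helper shown; the generic
 * atomic_inc_not_zero() in <linux/atomic.h> follows the same pattern):
 *
 *	static inline bool object_get_unless_zero(struct object *obj)
 *	{
 *		return __atomic_add_unless(&obj->refcount, 1, 0) != 0;
 *	}
 */
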
/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v.
 * Returns the new value of @v.
 */
static __always_inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

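/*
 * Usage sketch (hypothetical caller): unlike the rest of this file,
 * atomic_inc_short() operates on a bare short rather than an atomic_t:
 *
 *	static short nr_probes;
 *
 *	void note_probe(void)
 *	{
 *		atomic_inc_short(&nr_probes);
 *	}
 */
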
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */