xref: /linux/arch/x86/include/asm/atomic64_64.h (revision b24413180f5600bcb3bb70fbed5cf186b60864bd)
1*b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
21a3b1d89SBrian Gerst #ifndef _ASM_X86_ATOMIC64_64_H
31a3b1d89SBrian Gerst #define _ASM_X86_ATOMIC64_64_H
41a3b1d89SBrian Gerst 
51a3b1d89SBrian Gerst #include <linux/types.h>
61a3b1d89SBrian Gerst #include <asm/alternative.h>
71a3b1d89SBrian Gerst #include <asm/cmpxchg.h>
81a3b1d89SBrian Gerst 
91a3b1d89SBrian Gerst /* The 64-bit atomic type */
101a3b1d89SBrian Gerst 
111a3b1d89SBrian Gerst #define ATOMIC64_INIT(i)	{ (i) }
121a3b1d89SBrian Gerst 
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	/* READ_ONCE() forces a single, non-torn load of the counter. */
	return READ_ONCE((v)->counter);
}
241a3b1d89SBrian Gerst 
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	/* WRITE_ONCE() forces a single, non-torn store of the counter. */
	WRITE_ONCE(v->counter, i);
}
361a3b1d89SBrian Gerst 
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
	/*
	 * LOCK_PREFIX makes the read-modify-write atomic on SMP.  "er"
	 * allows @i as a register or a 32-bit sign-extended immediate;
	 * the extra "m" input tells the compiler the old memory value
	 * is consumed as well as written.
	 */
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
501a3b1d89SBrian Gerst 
/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	/* Locked subq; same constraint scheme as atomic64_add(). */
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
641a3b1d89SBrian Gerst 
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
	/* Locked subq; the "e" (zero-flag) condition becomes the bool. */
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}
781a3b1d89SBrian Gerst 
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
	/* Locked incq avoids loading an immediate operand. */
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
911a3b1d89SBrian Gerst 
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
	/* Locked decq avoids loading an immediate operand. */
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
1041a3b1d89SBrian Gerst 
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	/* Locked decq; the "e" (zero-flag) condition becomes the bool. */
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}
1171a3b1d89SBrian Gerst 
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	/* Locked incq; the "e" (zero-flag) condition becomes the bool. */
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}
1301a3b1d89SBrian Gerst 
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
	/* Locked addq; the "s" (sign-flag) condition becomes the bool. */
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
1441a3b1d89SBrian Gerst 
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
	/* xadd() yields the pre-add value, so add @i back for the new one. */
	return i + xadd(&v->counter, i);
}
1561a3b1d89SBrian Gerst 
1571a3b1d89SBrian Gerst static inline long atomic64_sub_return(long i, atomic64_t *v)
1581a3b1d89SBrian Gerst {
1591a3b1d89SBrian Gerst 	return atomic64_add_return(-i, v);
1601a3b1d89SBrian Gerst }
1611a3b1d89SBrian Gerst 
/**
 * atomic64_fetch_add - add and return the old value
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns the value of @v before the add.
 */
static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
166a8bcccabSPeter Zijlstra 
/**
 * atomic64_fetch_sub - subtract and return the old value
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns the value of @v before
 * the subtraction (implemented as an xadd of the negated operand).
 */
static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
171a8bcccabSPeter Zijlstra 
/* Increment/decrement @v by 1 and return the resulting value. */
#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
1741a3b1d89SBrian Gerst 
/**
 * atomic64_cmpxchg - compare and exchange
 * @v: pointer to type atomic64_t
 * @old: expected value
 * @new: value to store if *@v == @old
 *
 * Returns the value of @v observed by the cmpxchg (equals @old on
 * success).
 */
static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}
1791a3b1d89SBrian Gerst 
/*
 * try_cmpxchg variant: returns true on success; on failure *@old is
 * refreshed with the current counter value (the loops below rely on
 * this to retry without re-reading).
 */
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, long new)
{
	return try_cmpxchg(&v->counter, old, new);
}
185a9ebf306SPeter Zijlstra 
/**
 * atomic64_xchg - exchange the value
 * @v: pointer to type atomic64_t
 * @new: value to store
 *
 * Atomically stores @new in @v and returns the previous value.
 */
static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}
1901a3b1d89SBrian Gerst 
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was performed, false if @v was @u.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
	s64 c = atomic64_read(v);
	do {
		if (unlikely(c == u))
			return false;
		/* on failure try_cmpxchg reloads c with the current value */
	} while (!atomic64_try_cmpxchg(v, &c, c + a));
	return true;
}
2091a3b1d89SBrian Gerst 
2101a3b1d89SBrian Gerst #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
2111a3b1d89SBrian Gerst 
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);
	do {
		dec = c - 1;
		/* Bail out without touching @v once it would go negative. */
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));
	return dec;
}
229d7f6de1eSLuca Barbieri 
/**
 * atomic64_and - bitwise AND the atomic64 variable
 * @i: value to AND into @v
 * @v: pointer to type atomic64_t
 *
 * Atomically updates @v to @v & @i.
 */
static inline void atomic64_and(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}
2377fc1845dSPeter Zijlstra 
/**
 * atomic64_fetch_and - bitwise AND and return the old value
 * @i: value to AND into @v
 * @v: pointer to type atomic64_t
 *
 * Atomically updates @v to @v & @i via a try_cmpxchg loop and returns
 * the value @v had before the update.
 */
static inline long atomic64_fetch_and(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	/* on failure try_cmpxchg refreshes val, so the loop body is empty */
	do {
	} while (!atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}
2467fc1845dSPeter Zijlstra 
/**
 * atomic64_or - bitwise OR the atomic64 variable
 * @i: value to OR into @v
 * @v: pointer to type atomic64_t
 *
 * Atomically updates @v to @v | @i.
 */
static inline void atomic64_or(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}
254a8bcccabSPeter Zijlstra 
/**
 * atomic64_fetch_or - bitwise OR and return the old value
 * @i: value to OR into @v
 * @v: pointer to type atomic64_t
 *
 * Atomically updates @v to @v | @i via a try_cmpxchg loop and returns
 * the value @v had before the update.
 */
static inline long atomic64_fetch_or(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	/* on failure try_cmpxchg refreshes val, so the loop body is empty */
	do {
	} while (!atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}
263ba1c9f83SDmitry Vyukov 
/**
 * atomic64_xor - bitwise XOR the atomic64 variable
 * @i: value to XOR into @v
 * @v: pointer to type atomic64_t
 *
 * Atomically updates @v to @v ^ @i.
 */
static inline void atomic64_xor(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}
271ba1c9f83SDmitry Vyukov 
/**
 * atomic64_fetch_xor - bitwise XOR and return the old value
 * @i: value to XOR into @v
 * @v: pointer to type atomic64_t
 *
 * Atomically updates @v to @v ^ @i via a try_cmpxchg loop and returns
 * the value @v had before the update.
 */
static inline long atomic64_fetch_xor(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	/* on failure try_cmpxchg refreshes val, so the loop body is empty */
	do {
	} while (!atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}
2807fc1845dSPeter Zijlstra 
2811a3b1d89SBrian Gerst #endif /* _ASM_X86_ATOMIC64_64_H */
282