xref: /linux/arch/sparc/include/asm/atomic_64.h (revision f3a8b6645dc2e60d11f20c1c23afd964ff4e55ae)
/* atomic_64.h: Thankfully the V9 is at least reasonable for this
 *              stuff.
 *
 * Copyright (C) 1996, 1997, 2000, 2012 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }
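
/*
 * Usage sketch (illustrative; the variable names are hypothetical):
 * the _INIT macros are for build-time initialization only, e.g.
 *
 *	static atomic_t   nr_users   = ATOMIC_INIT(0);
 *	static atomic64_t total_seen = ATOMIC64_INIT(0);
 *
 * Runtime (re)initialization should go through atomic_set() below.
 */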

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v, i)	WRITE_ONCE(((v)->counter), (i))
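
/*
 * Note: READ_ONCE()/WRITE_ONCE() give non-torn, single-copy accesses
 * and stop the compiler from caching or refetching the value; they do
 * not by themselves order the access against other memory operations.
 * Illustrative use (hypothetical name):
 *
 *	atomic_set(&nr_users, 0);
 *	if (atomic_read(&nr_users) > 0)
 *		...
 */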

#define ATOMIC_OP(op)							\
void atomic_##op(int, atomic_t *);					\
void atomic64_##op(long, atomic64_t *);

#define ATOMIC_OP_RETURN(op)						\
int atomic_##op##_return(int, atomic_t *);				\
long atomic64_##op##_return(long, atomic64_t *);

#define ATOMIC_FETCH_OP(op)						\
int atomic_fetch_##op(int, atomic_t *);					\
long atomic64_fetch_##op(long, atomic64_t *);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
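
/*
 * Expansion sketch (for reference, not generated here): ATOMIC_OPS(add)
 * declares the out-of-line implementations
 *
 *	void atomic_add(int, atomic_t *);
 *	int  atomic_add_return(int, atomic_t *);
 *	int  atomic_fetch_add(int, atomic_t *);
 *
 * plus the atomic64_* counterparts taking long. The bodies live in the
 * arch's assembly helpers (arch/sparc/lib/atomic_64.S at this
 * revision), not in this header.
 */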

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)
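
/*
 * Note the bitwise ops (and/or/xor) are declared without the
 * op##_return variants; only the fetch_##op forms return a value,
 * matching the kernel-wide atomic API.
 */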

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_dec_return(v)   atomic_sub_return(1, v)
#define atomic64_dec_return(v) atomic64_sub_return(1, v)

#define atomic_inc_return(v)   atomic_add_return(1, v)
#define atomic64_inc_return(v) atomic64_add_return(1, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, false otherwise.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_return(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, v) == 0)
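
/*
 * Typical pattern (illustrative; 'obj' and its fields are
 * hypothetical): drop a reference and free on the final put:
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 */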

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_return(i, v) < 0)

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
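
/*
 * Semantics reminder: atomic_cmpxchg(v, o, n) stores @n only if the
 * counter still equals @o and returns the value it observed either
 * way, so success is "return value == o". atomic_xchg() stores the
 * new value unconditionally and returns the old one.
 */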

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		/* Bail out without modifying @v once it hits @u. */
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		/* cmpxchg returned what we expected: the add happened. */
		if (likely(old == c))
			break;
		/* Someone else changed @v; retry with the fresh value. */
		c = old;
	}
	return c;
}
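
/*
 * Usage sketch: __atomic_add_unless() returns the value it last
 * observed, so the generic wrappers in <linux/atomic.h> derive a truth
 * value from it, roughly:
 *
 *	atomic_add_unless(v, a, u)  =>  __atomic_add_unless(v, a, u) != u
 *	atomic_inc_not_zero(v)      =>  atomic_add_unless(v, 1, 0)
 */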

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	/* Unlike __atomic_add_unless(), return whether the add happened. */
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
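
/*
 * Illustrative use (hypothetical structure): take a 64-bit reference
 * only while the object is still live:
 *
 *	if (!atomic64_inc_not_zero(&obj->refcnt))
 *		return -ENOENT;
 */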

long atomic64_dec_if_positive(atomic64_t *v);
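
/*
 * Implemented out of line (in the arch's assembly helpers at this
 * revision). Contract, roughly: atomically do "if (v - 1 >= 0)
 * v = v - 1" and return the old value minus one whether or not the
 * store was performed, so a negative return means @v was left
 * untouched.
 */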

#endif /* !(__ARCH_SPARC64_ATOMIC__) */