/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2016
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann,
 */

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

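/*
 * The arch_atomic_*() helpers below are wrapped by the generic atomic
 * API in <linux/atomic.h>; callers use atomic_read(), atomic_add() etc.
 * rather than invoking them directly.
 *
 * arch_atomic_read() and arch_atomic_set() are plain (non-RMW) accesses
 * of v->counter, provided by the __atomic_read()/__atomic_set() helpers
 * from <asm/atomic_ops.h>.
 */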
static __always_inline int arch_atomic_read(const atomic_t *v)
{
	return __atomic_read(v);
}
#define arch_atomic_read arch_atomic_read

static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
	__atomic_set(v, i);
}
#define arch_atomic_set arch_atomic_set

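/*
 * arch_atomic_add_return() returns the new value and
 * arch_atomic_fetch_add() the old value; both are built on the fully
 * ordered __atomic_add_barrier() primitive, as required for
 * value-returning atomic RMW operations. arch_atomic_add() returns
 * nothing and uses the weaker, non-barrier primitive.
 */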
static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter) + i;
}
#define arch_atomic_add_return arch_atomic_add_return

static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter);
}
#define arch_atomic_fetch_add arch_atomic_fetch_add

static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
	__atomic_add(i, &v->counter);
}
#define arch_atomic_add arch_atomic_add

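/* Subtraction is implemented as addition of the negated operand. */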
#define arch_atomic_sub(_i, _v)		arch_atomic_add(-(int)(_i), _v)
#define arch_atomic_sub_return(_i, _v)	arch_atomic_add_return(-(int)(_i), _v)
#define arch_atomic_fetch_sub(_i, _v)	arch_atomic_fetch_add(-(int)(_i), _v)

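/*
 * ATOMIC_OPS() generates arch_atomic_{and,or,xor}() together with the
 * corresponding arch_atomic_fetch_*() variants, which return the old
 * value and use the fully ordered __atomic_*_barrier() helpers.
 */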
#define ATOMIC_OPS(op)							\
static __always_inline void arch_atomic_##op(int i, atomic_t *v)	\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v)	\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS

#define arch_atomic_and			arch_atomic_and
#define arch_atomic_or			arch_atomic_or
#define arch_atomic_xor			arch_atomic_xor
#define arch_atomic_fetch_and		arch_atomic_fetch_and
#define arch_atomic_fetch_or		arch_atomic_fetch_or
#define arch_atomic_fetch_xor		arch_atomic_fetch_xor

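/*
 * Exchange and compare-and-exchange operate on v->counter via the
 * generic arch_xchg()/arch_cmpxchg() helpers from <asm/cmpxchg.h>.
 * arch_atomic_try_cmpxchg() returns true on success; on failure it
 * updates *old with the value found in the counter.
 */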
static __always_inline int arch_atomic_xchg(atomic_t *v, int new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg

static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg

static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg

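/*
 * 64-bit counterparts operating on atomic64_t. ATOMIC64_INIT() is the
 * static initializer. The __atomic64_*() primitives take a long *,
 * hence the casts of the s64 counter; both types are 64 bits wide on
 * s390.
 */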
#define ATOMIC64_INIT(i)  { (i) }

static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __atomic64_read(v);
}
#define arch_atomic64_read arch_atomic64_read

static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__atomic64_set(v, i);
}
#define arch_atomic64_set arch_atomic64_set

static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter) + i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	__atomic64_add(i, (long *)&v->counter);
}
#define arch_atomic64_add arch_atomic64_add

static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg

static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

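/*
 * As above, ATOMIC64_OPS() generates the 64-bit and/or/xor operations
 * and their value-returning fetch variants.
 */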
#define ATOMIC64_OPS(op)							\
static __always_inline void arch_atomic64_##op(s64 i, atomic64_t *v)		\
{										\
	__atomic64_##op(i, (long *)&v->counter);				\
}										\
static __always_inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v)	\
{										\
	return __atomic64_##op##_barrier(i, (long *)&v->counter);		\
}

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS

#define arch_atomic64_and		arch_atomic64_and
#define arch_atomic64_or		arch_atomic64_or
#define arch_atomic64_xor		arch_atomic64_xor
#define arch_atomic64_fetch_and		arch_atomic64_fetch_and
#define arch_atomic64_fetch_or		arch_atomic64_fetch_or
#define arch_atomic64_fetch_xor		arch_atomic64_fetch_xor

#define arch_atomic64_sub_return(_i, _v) arch_atomic64_add_return(-(s64)(_i), _v)
#define arch_atomic64_fetch_sub(_i, _v)  arch_atomic64_fetch_add(-(s64)(_i), _v)
#define arch_atomic64_sub(_i, _v)	 arch_atomic64_add(-(s64)(_i), _v)

#endif /* __ARCH_S390_ATOMIC__  */