/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level functions for atomic operations
 *
 * Copyright IBM Corp. 1999, 2016
 */

#ifndef __ARCH_S390_ATOMIC_OPS__
#define __ARCH_S390_ATOMIC_OPS__

#include <linux/limits.h>
#include <asm/march.h>
#include <asm/asm.h>

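/* Read a 32-bit value atomically via a single LOAD instruction. */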
static __always_inline int __atomic_read(const int *ptr)
{
	int val;

	asm volatile(
		"	l	%[val],%[ptr]\n"
		: [val] "=d" (val) : [ptr] "R" (*ptr));
	return val;
}

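/*
 * Write a 32-bit value atomically. For compile-time constants that fit
 * into a signed 16-bit immediate, MVHI stores the value directly
 * without needing a register.
 */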
static __always_inline void __atomic_set(int *ptr, int val)
{
	if (__builtin_constant_p(val) && val >= S16_MIN && val <= S16_MAX) {
		asm volatile(
			"	mvhi	%[ptr],%[val]\n"
			: [ptr] "=Q" (*ptr) : [val] "K" (val));
	} else {
		asm volatile(
			"	st	%[val],%[ptr]\n"
			: [ptr] "=R" (*ptr) : [val] "d" (val));
	}
}

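/* Read a 64-bit value atomically via a single LOAD (64-bit) instruction. */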
static __always_inline long __atomic64_read(const long *ptr)
{
	long val;

	asm volatile(
		"	lg	%[val],%[ptr]\n"
		: [val] "=d" (val) : [ptr] "RT" (*ptr));
	return val;
}

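/*
 * Write a 64-bit value atomically; MVGHI covers compile-time constants
 * that fit into a signed 16-bit immediate.
 */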
static __always_inline void __atomic64_set(long *ptr, long val)
{
	if (__builtin_constant_p(val) && val >= S16_MIN && val <= S16_MAX) {
		asm volatile(
			"	mvghi	%[ptr],%[val]\n"
			: [ptr] "=Q" (*ptr) : [val] "K" (val));
	} else {
		asm volatile(
			"	stg	%[val],%[ptr]\n"
			: [ptr] "=RT" (*ptr) : [val] "d" (val));
	}
}

#ifdef MARCH_HAS_Z196_FEATURES

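/*
 * z196 and newer machines provide the interlocked-access facility:
 * LOAD AND ADD/AND/OR/EXCLUSIVE OR update memory atomically and return
 * the old value in a single instruction.
 */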
#define __ATOMIC_OP(op_name, op_type, op_string, op_barrier)		\
static __always_inline op_type op_name(op_type val, op_type *ptr)	\
{									\
	op_type old;							\
									\
	asm volatile(							\
		op_string "	%[old],%[val],%[ptr]\n"			\
		op_barrier						\
		: [old] "=d" (old), [ptr] "+QS" (*ptr)			\
		: [val] "d" (val) : "cc", "memory");			\
	return old;							\
}									\

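/*
 * Each operation comes in a plain and a _barrier flavour; the latter
 * appends "bcr 14,0", which acts as a fast serialization (full memory
 * barrier) on z196 and newer machines.
 */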
#define __ATOMIC_OPS(op_name, op_type, op_string)			\
	__ATOMIC_OP(op_name, op_type, op_string, "")			\
	__ATOMIC_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_OPS(__atomic_add, int, "laa")
__ATOMIC_OPS(__atomic_and, int, "lan")
__ATOMIC_OPS(__atomic_or,  int, "lao")
__ATOMIC_OPS(__atomic_xor, int, "lax")

__ATOMIC_OPS(__atomic64_add, long, "laag")
__ATOMIC_OPS(__atomic64_and, long, "lang")
__ATOMIC_OPS(__atomic64_or,  long, "laog")
__ATOMIC_OPS(__atomic64_xor, long, "laxg")

#undef __ATOMIC_OPS
#undef __ATOMIC_OP

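/*
 * ASI/AGSI add a signed 8-bit immediate to memory atomically; unlike
 * the LOAD AND ... variants they do not return the old value.
 */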
#define __ATOMIC_CONST_OP(op_name, op_type, op_string, op_barrier)	\
static __always_inline void op_name(op_type val, op_type *ptr)		\
{									\
	asm volatile(							\
		op_string "	%[ptr],%[val]\n"			\
		op_barrier						\
		: [ptr] "+QS" (*ptr) : [val] "i" (val) : "cc", "memory");\
}

#define __ATOMIC_CONST_OPS(op_name, op_type, op_string)			\
	__ATOMIC_CONST_OP(op_name, op_type, op_string, "")		\
	__ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_CONST_OPS(__atomic_add_const, int, "asi")
__ATOMIC_CONST_OPS(__atomic64_add_const, long, "agsi")

#undef __ATOMIC_CONST_OPS
#undef __ATOMIC_CONST_OP

#else /* MARCH_HAS_Z196_FEATURES */

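/*
 * Pre-z196 fallback: emulate the atomic read-modify-write with a
 * COMPARE AND SWAP loop. CS replaces *ptr with the new value only if
 * *ptr still equals the old value; on mismatch (condition code 1) the
 * loop retries with the freshly loaded old value.
 */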
#define __ATOMIC_OP(op_name, op_string)					\
static __always_inline int op_name(int val, int *ptr)			\
{									\
	int old, new;							\
									\
	asm volatile(							\
		"0:	lr	%[new],%[old]\n"			\
		op_string "	%[new],%[val]\n"			\
		"	cs	%[old],%[new],%[ptr]\n"			\
		"	jl	0b"					\
		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr)\
		: [val] "d" (val), "0" (*ptr) : "cc", "memory");	\
	return old;							\
}

#define __ATOMIC_OPS(op_name, op_string)				\
	__ATOMIC_OP(op_name, op_string)					\
	__ATOMIC_OP(op_name##_barrier, op_string)

__ATOMIC_OPS(__atomic_add, "ar")
__ATOMIC_OPS(__atomic_and, "nr")
__ATOMIC_OPS(__atomic_or,  "or")
__ATOMIC_OPS(__atomic_xor, "xr")

#undef __ATOMIC_OPS

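/* The same compare-and-swap loop, using the 64-bit CSG instruction. */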
#define __ATOMIC64_OP(op_name, op_string)				\
static __always_inline long op_name(long val, long *ptr)		\
{									\
	long old, new;							\
									\
	asm volatile(							\
		"0:	lgr	%[new],%[old]\n"			\
		op_string "	%[new],%[val]\n"			\
		"	csg	%[old],%[new],%[ptr]\n"			\
		"	jl	0b"					\
		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+QS" (*ptr)\
		: [val] "d" (val), "0" (*ptr) : "cc", "memory");	\
	return old;							\
}

#define __ATOMIC64_OPS(op_name, op_string)				\
	__ATOMIC64_OP(op_name, op_string)				\
	__ATOMIC64_OP(op_name##_barrier, op_string)

__ATOMIC64_OPS(__atomic64_add, "agr")
__ATOMIC64_OPS(__atomic64_and, "ngr")
__ATOMIC64_OPS(__atomic64_or,  "ogr")
__ATOMIC64_OPS(__atomic64_xor, "xgr")

#undef __ATOMIC64_OPS

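/*
 * Without ASI/AGSI the _const variants are plain adds; the CS loop is
 * already serializing, so the _barrier variants need nothing extra.
 */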
#define __atomic_add_const(val, ptr)		__atomic_add(val, ptr)
#define __atomic_add_const_barrier(val, ptr)	__atomic_add(val, ptr)
#define __atomic64_add_const(val, ptr)		__atomic64_add(val, ptr)
#define __atomic64_add_const_barrier(val, ptr)	__atomic64_add(val, ptr)

#endif /* MARCH_HAS_Z196_FEATURES */

#if defined(MARCH_HAS_Z196_FEATURES) && defined(__HAVE_ASM_FLAG_OUTPUTS__)

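/*
 * With flag output operands ("=@cc") the compiler can use the condition
 * code directly. LAAL/LAALG set cc 0 or 2 when the logical add result
 * is zero, so "add and test for zero" needs no separate compare.
 */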
#define __ATOMIC_TEST_OP(op_name, op_type, op_string, op_barrier)	\
static __always_inline bool op_name(op_type val, op_type *ptr)		\
{									\
	op_type tmp;							\
	int cc;								\
									\
	asm volatile(							\
		op_string "	%[tmp],%[val],%[ptr]\n"			\
		op_barrier						\
		: "=@cc" (cc), [tmp] "=d" (tmp), [ptr] "+QS" (*ptr)	\
		: [val] "d" (val)					\
		: "memory");						\
	return (cc == 0) || (cc == 2);					\
}									\

#define __ATOMIC_TEST_OPS(op_name, op_type, op_string)			\
	__ATOMIC_TEST_OP(op_name, op_type, op_string, "")		\
	__ATOMIC_TEST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_TEST_OPS(__atomic_add_and_test, int, "laal")
__ATOMIC_TEST_OPS(__atomic64_add_and_test, long, "laalg")

#undef __ATOMIC_TEST_OPS
#undef __ATOMIC_TEST_OP

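/*
 * ALSI/ALGSI add a signed 8-bit immediate with logical (unsigned)
 * condition code semantics, so the same cc 0/2 check detects a zero
 * result.
 */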
#define __ATOMIC_CONST_TEST_OP(op_name, op_type, op_string, op_barrier)	\
static __always_inline bool op_name(op_type val, op_type *ptr)		\
{									\
	int cc;								\
									\
	asm volatile(							\
		op_string "	%[ptr],%[val]\n"			\
		op_barrier						\
		: "=@cc" (cc), [ptr] "+QS" (*ptr)			\
		: [val] "i" (val)					\
		: "memory");						\
	return (cc == 0) || (cc == 2);					\
}

#define __ATOMIC_CONST_TEST_OPS(op_name, op_type, op_string)		\
	__ATOMIC_CONST_TEST_OP(op_name, op_type, op_string, "")		\
	__ATOMIC_CONST_TEST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_CONST_TEST_OPS(__atomic_add_const_and_test, int, "alsi")
__ATOMIC_CONST_TEST_OPS(__atomic64_add_const_and_test, long, "algsi")

#undef __ATOMIC_CONST_TEST_OPS
#undef __ATOMIC_CONST_TEST_OP

#else /* defined(MARCH_HAS_Z196_FEATURES) && defined(__HAVE_ASM_FLAG_OUTPUTS__) */

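/*
 * Fallback: the add returns the old value, so the result is zero
 * exactly when old == -val, e.g. __atomic_add_and_test(-1, &v) is true
 * when v drops to zero.
 */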
#define __ATOMIC_TEST_OP(op_name, op_func, op_type)			\
static __always_inline bool op_name(op_type val, op_type *ptr)		\
{									\
	return op_func(val, ptr) == -val;				\
}

__ATOMIC_TEST_OP(__atomic_add_and_test,			__atomic_add,		int)
__ATOMIC_TEST_OP(__atomic_add_and_test_barrier,		__atomic_add_barrier,	int)
__ATOMIC_TEST_OP(__atomic_add_const_and_test,		__atomic_add,		int)
__ATOMIC_TEST_OP(__atomic_add_const_and_test_barrier,	__atomic_add_barrier,	int)
__ATOMIC_TEST_OP(__atomic64_add_and_test,		__atomic64_add,		long)
__ATOMIC_TEST_OP(__atomic64_add_and_test_barrier,	__atomic64_add_barrier, long)
__ATOMIC_TEST_OP(__atomic64_add_const_and_test,		__atomic64_add,		long)
__ATOMIC_TEST_OP(__atomic64_add_const_and_test_barrier,	__atomic64_add_barrier,	long)

#undef __ATOMIC_TEST_OP

#endif /* defined(MARCH_HAS_Z196_FEATURES) && defined(__HAVE_ASM_FLAG_OUTPUTS__) */

#endif /* __ARCH_S390_ATOMIC_OPS__ */