xref: /linux/arch/s390/include/asm/cmpxchg.h (revision 95e9fd10f06cb5642028b6b851e32b8c8afb4571)
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/types.h>

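/*
 * Deliberately not defined anywhere: calling __xchg() with an
 * unsupported operand size leaves a reference to this function in the
 * object file, turning the mistake into a link-time error.
 */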
extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
	unsigned long addr, old;
	int shift;

	switch (size) {
	case 1:
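		/*
		 * CS only operates on aligned 4-byte words, so emulate the
		 * byte exchange on the containing word: compute the aligned
		 * word address and the bit position of the target byte
		 * within it (big-endian, so byte offset 0 means shift 24).
		 */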
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 2:
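		/* Same word-based emulation as above, for a halfword. */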
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			"	l	%0,%3\n"
			"0:	cs	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) ptr)
			: "d" (x), "Q" (*(int *) ptr)
			: "memory", "cc");
		return old;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	lg	%0,%3\n"
			"0:	csg	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(long *) ptr)
			: "d" (x), "Q" (*(long *) ptr)
			: "memory", "cc");
		return old;
#endif /* CONFIG_64BIT */
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr, x)							  \
({									  \
	__typeof__(*(ptr)) __ret;					  \
	__ret = (__typeof__(*(ptr)))					  \
		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
	__ret;								  \
})
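
/*
 * Illustrative only (not part of the original header): a typical use
 * of xchg() is to atomically take over a flag and observe its previous
 * value; "flag" below is a made-up variable for the example.
 *
 *	static unsigned int flag = 1;
 *
 *	unsigned int was_set = xchg(&flag, 0);
 *
 * Afterwards flag is 0 and was_set holds its old value; no other CPU
 * can have observed an intermediate state.
 */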

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long addr, prev, tmp;
	int shift;

	switch (size) {
	case 1:
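		/*
		 * Emulate a byte-sized compare-and-swap on the containing
		 * word: splice the expected and the new byte into the word
		 * and retry only when some *other* byte of the word changed
		 * underneath us (the xr/nr pair after a failed CS tests
		 * exactly that). A mismatch in the target byte itself is a
		 * genuine failure and falls through.
		 */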
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
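		/* Same word-based emulation as above, for a halfword. */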
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			"	cs	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(int *) ptr)
			: "0" (old), "d" (new), "Q" (*(int *) ptr)
			: "memory", "cc");
		return prev;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	csg	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(long *) ptr)
			: "0" (old), "d" (new), "Q" (*(long *) ptr)
			: "memory", "cc");
		return prev;
#endif /* CONFIG_64BIT */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n)						 \
({									 \
	__typeof__(*(ptr)) __ret;					 \
	__ret = (__typeof__(*(ptr)))					 \
		__cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
			  sizeof(*(ptr)));				 \
	__ret;								 \
})
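
/*
 * Illustrative only (not part of the original header): cmpxchg() is
 * the building block for lock-free read-modify-write loops; "counter"
 * below is a made-up variable for the example.
 *
 *	unsigned int old, cur = counter;
 *
 *	while ((old = cmpxchg(&counter, cur, cur + 1)) != cur)
 *		cur = old;
 *
 * On exit the increment has been applied exactly once, no matter how
 * many CPUs raced on the same counter.
 */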

#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)						\
({									\
	cmpxchg((ptr), (o), (n));					\
})
#else /* CONFIG_64BIT */
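/*
 * There is no 64 bit CS on 31 bit, so cmpxchg64 is built on COMPARE
 * DOUBLE AND SWAP (cds), which operates on an even/odd general
 * register pair described by the register_pair union from
 * <asm/types.h>.
 */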
static inline unsigned long long __cmpxchg64(void *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};

	asm volatile(
		"	cds	%0,%2,%1"
		: "+&d" (rp_old), "+Q" (*(unsigned long long *) ptr)
		: "d" (rp_new)
		: "memory", "cc");
	return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)				\
({							\
	__typeof__(*(ptr)) __ret;			\
	__ret = (__typeof__(*(ptr)))			\
		__cmpxchg64((ptr),			\
			    (unsigned long long)(o),	\
			    (unsigned long long)(n));	\
	__ret;						\
})
#endif /* CONFIG_64BIT */

#include <asm-generic/cmpxchg-local.h>

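/*
 * A "local" cmpxchg only has to be atomic wrt the current CPU. The
 * sizes that CS/CSG handle natively simply reuse the fully atomic
 * __cmpxchg(); anything else falls back to the generic
 * interrupt-disabling implementation.
 */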
static inline unsigned long __cmpxchg_local(void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 1:
	case 2:
	case 4:
#ifdef CONFIG_64BIT
	case 8:
#endif
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg_local((ptr), (unsigned long)(o),		\
				(unsigned long)(n), sizeof(*(ptr)));	\
	__ret;								\
})

#define cmpxchg64_local(ptr, o, n)	cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */