xref: /linux/arch/s390/include/asm/cmpxchg.h (revision 3f0a50f345f78183f6e9b39c2f45ca5dcaa511ca)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

void __xchg_called_with_bad_pointer(void);

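/*
 * Atomically exchange the value at "address" with "x" and return the old
 * value.  Word (4 byte) and doubleword (8 byte) accesses map directly onto
 * a CS/CSG retry loop; byte and halfword accesses are emulated with a
 * compare-and-swap on the 4-byte aligned word that contains them.
 */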
static __always_inline unsigned long __xchg(unsigned long x,
					    unsigned long address, int size)
{
	unsigned long old;
	int shift;

	switch (size) {
	case 1:
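		/*
		 * s390 is big-endian: the byte at offset i within its aligned
		 * 4-byte word sits (3 - i) * 8 bits from the least significant
		 * end, which is what (3 ^ (address & 3)) << 3 computes without
		 * a branch.  The address is rounded down to the containing
		 * word, and register 0 serves as scratch to splice the new
		 * byte into that word inside the CS retry loop.
		 */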
		shift = (3 ^ (address & 3)) << 3;
		address ^= address & 3;
		asm volatile(
			"       l       %0,%1\n"
			"0:     lr      0,%0\n"
			"       nr      0,%3\n"
			"       or      0,%2\n"
			"       cs      %0,0,%1\n"
			"       jl      0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 2:
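		/* Same technique as above, applied to an aligned halfword. */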
		shift = (2 ^ (address & 2)) << 3;
		address ^= address & 2;
		asm volatile(
			"       l       %0,%1\n"
			"0:     lr      0,%0\n"
			"       nr      0,%3\n"
			"       or      0,%2\n"
			"       cs      %0,0,%1\n"
			"       jl      0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift))
			: "memory", "cc", "0");
		return old >> shift;
	case 4:
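		/* Native word size: a plain CS retry loop, no masking needed. */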
		asm volatile(
			"       l       %0,%1\n"
			"0:     cs      %0,%2,%1\n"
			"       jl      0b\n"
			: "=&d" (old), "+Q" (*(int *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	case 8:
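		/* Doubleword size: the same loop with the 64-bit CSG instruction. */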
		asm volatile(
			"       lg      %0,%1\n"
			"0:     csg     %0,%2,%1\n"
			"       jl      0b\n"
			: "=&d" (old), "+QS" (*(long *) address)
			: "d" (x)
			: "memory", "cc");
		return old;
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __ret;					\
									\
	__ret = (__typeof__(*(ptr)))					\
		__xchg((unsigned long)(x), (unsigned long)(ptr),	\
		       sizeof(*(ptr)));					\
	__ret;								\
})
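
/*
 * Illustrative use only (not part of the original header), with a
 * hypothetical "lock" variable:
 *
 *	unsigned int lock = 0;
 *	unsigned int old = arch_xchg(&lock, 1);
 *
 * "old" receives the previous value (0) and "lock" becomes 1, in a single
 * atomic step.
 */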

void __cmpxchg_called_with_bad_pointer(void);

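/*
 * Atomically compare the value at "address" with "old" and, if they are
 * equal, replace it with "new".  The previous memory contents are returned,
 * so the caller can tell success (return value == "old") from failure.  As
 * with __xchg(), byte and halfword accesses are emulated via CS on the
 * containing aligned 4-byte word.
 */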
static __always_inline unsigned long __cmpxchg(unsigned long address,
					       unsigned long old,
					       unsigned long new, int size)
{
	unsigned long prev, tmp;
	int shift;

	switch (size) {
	case 1:
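		/*
		 * Splice the expected and the new byte into two copies of the
		 * containing word and let CS compare whole words.  On failure,
		 * retry only if some *other* byte of the word changed: the
		 * xr/nr pair masks the target byte out of the difference.  If
		 * the target byte itself differs from "old", the cmpxchg has
		 * genuinely failed and the current value is returned.
		 */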
		shift = (3 ^ (address & 3)) << 3;
		address ^= address & 3;
		asm volatile(
			"       l       %0,%2\n"
			"0:     nr      %0,%5\n"
			"       lr      %1,%0\n"
			"       or      %0,%3\n"
			"       or      %1,%4\n"
			"       cs      %0,%1,%2\n"
			"       jnl     1f\n"
			"       xr      %1,%0\n"
			"       nr      %1,%5\n"
			"       jnz     0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
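		/* Halfword variant of the byte emulation above. */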
		shift = (2 ^ (address & 2)) << 3;
		address ^= address & 2;
		asm volatile(
			"       l       %0,%2\n"
			"0:     nr      %0,%5\n"
			"       lr      %1,%0\n"
			"       or      %0,%3\n"
			"       or      %1,%4\n"
			"       cs      %0,%1,%2\n"
			"       jnl     1f\n"
			"       xr      %1,%0\n"
			"       nr      %1,%5\n"
			"       jnz     0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
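		/*
		 * Word size: a single CS does the compare and the swap; the
		 * 8-byte case below uses CSG the same way.
		 */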
		asm volatile(
			"       cs      %0,%3,%1\n"
			: "=&d" (prev), "+Q" (*(int *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	case 8:
		asm volatile(
			"       csg     %0,%3,%1\n"
			: "=&d" (prev), "+QS" (*(long *) address)
			: "0" (old), "d" (new)
			: "memory", "cc");
		return prev;
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define arch_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
									\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg((unsigned long)(ptr), (unsigned long)(o),	\
			  (unsigned long)(n), sizeof(*(ptr)));		\
	__ret;								\
})

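/*
 * CS and CSG are serializing instructions, so the 64-bit and "local"
 * variants can simply reuse the fully ordered implementation; s390 has
 * no cheaper CPU-local alternative to fall back on.
 */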
#define arch_cmpxchg64		arch_cmpxchg
#define arch_cmpxchg_local	arch_cmpxchg
#define arch_cmpxchg64_local	arch_cmpxchg
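
/*
 * Illustrative use only (not part of the original header), showing the
 * usual retry idiom around a hypothetical "counter" variable:
 *
 *	unsigned long old, new;
 *
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (arch_cmpxchg(&counter, old, new) != old);
 */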

#define system_has_cmpxchg_double()	1

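/*
 * Compare-and-swap two adjacent doublewords in one shot.
 * system_has_cmpxchg_double() is unconditionally 1 since CDSG is part of
 * the base z/Architecture instruction set.  CDSG takes the expected and
 * new values in even/odd register pairs and sets the condition code to 0
 * on success and to 1 on failure; the IPM/SRL sequence extracts the
 * condition code, so the function returns true on success.
 */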
static __always_inline int __cmpxchg_double(unsigned long p1, unsigned long p2,
					    unsigned long o1, unsigned long o2,
					    unsigned long n1, unsigned long n2)
{
	union register_pair old = { .even = o1, .odd = o2, };
	union register_pair new = { .even = n1, .odd = n2, };
	int cc;

	asm volatile(
		"	cdsg	%[old],%[new],%[ptr]\n"
		"	ipm	%[cc]\n"
		"	srl	%[cc],28\n"
		: [cc] "=&d" (cc), [old] "+&d" (old.pair)
		: [new] "d" (new.pair),
		  [ptr] "QS" (*(unsigned long *)p1), "Q" (*(unsigned long *)p2)
		: "memory", "cc");
	return !cc;
}

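/*
 * Both locations must be long-sized and directly adjacent; the
 * BUILD_BUG_ON/VM_BUG_ON checks below enforce that p2 immediately
 * follows p1.  CDSG additionally requires the pair to be aligned on a
 * 16-byte boundary, which is the caller's responsibility.
 */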
#define arch_cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({									\
	typeof(p1) __p1 = (p1);						\
	typeof(p2) __p2 = (p2);						\
									\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	__cmpxchg_double((unsigned long)__p1, (unsigned long)__p2,	\
			 (unsigned long)(o1), (unsigned long)(o2),	\
			 (unsigned long)(n1), (unsigned long)(n2));	\
})

#endif /* __ASM_CMPXCHG_H */