xref: /linux/arch/alpha/include/asm/xchg.h (revision c13aca79ff3c4af5fd31a5b2743a90eba6e36a26)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory
 * barrier.  So this file is included twice from asm/cmpxchg.h.
 */
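/*
 * Illustrative sketch (not part of the original header; the
 * authoritative definitions live in asm/cmpxchg.h): before each
 * inclusion the including header is expected to map ____xchg() and
 * ____cmpxchg() onto a different name suffix, roughly like
 *
 *	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)		__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>
 *
 * so every function body below is emitted once as the _local variant
 * and once as the fully ordered variant.
 */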

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */
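/*
 * Illustrative use (sketch, not part of the original header): xchg()
 * as a test-and-set lock is the kind of critical-section construction
 * the comment above has in mind; the "memory" clobber keeps the
 * compiler from moving accesses into or out of the locked region:
 *
 *	while (xchg(&lock, 1))		(spin until the old value was 0)
 *		cpu_relax();
 *	... critical section ...
 *	smp_store_release(&lock, 0);
 */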

/*
 * Alpha provides load-locked/store-conditional only at longword and
 * quadword granularity, so the 8- and 16-bit variants operate on the
 * aligned quadword containing *m: andnot masks the address down to
 * 8-byte alignment, insbl/inswl shifts the new value into its byte
 * lane, extbl/extwl extracts the old value, mskbl/mskwl clears the
 * lane before merging, and the ldq_l/stq_c pair is retried if the
 * store-conditional fails.
 */
static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
		case 1:
			return ____xchg(_u8, ptr, x);
		case 2:
			return ____xchg(_u16, ptr, x);
		case 4:
			return ____xchg(_u32, ptr, x);
		case 8:
			return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
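
/*
 * Illustrative sketch (assumption; the real wrappers live in
 * asm/cmpxchg.h): the public xchg() macro is expected to reach the
 * dispatcher above by passing sizeof(*(ptr)) as the size argument,
 * roughly like
 *
 *	#define xchg(ptr, x)						\
 *	({								\
 *		__typeof__(*(ptr)) _x_ = (x);				\
 *		(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_,	\
 *					    sizeof(*(ptr)));		\
 *	})
 *
 * so an unsupported operand size falls through the switch to the
 * extern declaration above and fails at link time.  cmpxchg() is
 * wired up the same way, with an extra "old" argument.
 */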

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
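/*
 * Illustrative use (sketch, not part of the original header): the
 * usual retry loop built on the semantics described above, here an
 * atomic increment of *p:
 *
 *	old = READ_ONCE(*p);
 *	for (;;) {
 *		prev = cmpxchg(p, old, old + 1);
 *		if (prev == old)
 *			break;		(the store happened)
 *		old = prev;		(lost a race, retry with the new value)
 *	}
 */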

/*
 * As with ____xchg(_u8/_u16) above, the sub-quadword cmpxchg variants
 * operate on the containing aligned quadword; if the extracted old
 * value does not match, they branch to label 2 and return without
 * attempting the store-conditional.
 */
static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	      int size)
{
	switch (size) {
		case 1:
			return ____cmpxchg(_u8, ptr, old, new);
		case 2:
			return ____cmpxchg(_u16, ptr, old, new);
		case 4:
			return ____cmpxchg(_u32, ptr, old, new);
		case 8:
			return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#endif