xref: /linux/arch/s390/include/asm/cmpxchg.h (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732)
/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

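/*
 * cmpxchg() - atomically compare *ptr with o and, if they match,
 * replace it with n.  Returns the value that was found at *ptr, so
 * callers check success with "cmpxchg(ptr, o, n) == o".  The
 * compiler's __sync_val_compare_and_swap() builtin is used, which on
 * s390 compiles to the compare-and-swap instructions and implies a
 * full memory barrier.
 */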
#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __o = (o);					\
	__typeof__(*(ptr)) __n = (n);					\
	(__typeof__(*(ptr))) __sync_val_compare_and_swap((ptr),__o,__n);\
})

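/*
 * The compare-and-swap instructions are serializing and handle 64-bit
 * quantities natively, so the 64-bit and "local" variants are no
 * cheaper than cmpxchg() itself and simply alias it.
 */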
#define cmpxchg64	cmpxchg
#define cmpxchg_local	cmpxchg
#define cmpxchg64_local	cmpxchg

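/*
 * xchg() - unconditionally store x into *ptr and return the previous
 * value.  Implemented as a compare-and-swap retry loop: read the
 * current value and try to replace it with x; if another CPU changed
 * *ptr in between, the compare-and-swap fails and the loop retries.
 */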
#define xchg(ptr, x)							\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old;					\
	do {								\
		__old = *__ptr;						\
	} while (!__sync_bool_compare_and_swap(__ptr, __old, x));	\
	__old;								\
})

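/*
 * __cmpxchg_double_op() - common backend for the double compare-and-
 * swap.  cds swaps a pair of adjacent 32-bit words, cdsg a pair of
 * adjacent 64-bit doublewords.  Both instructions take their old and
 * new values in even/odd register pairs, so the operands are pinned to
 * registers 2/3 (old) and 4/5 (new).  "ipm" captures the condition
 * code and "srl ...,28" shifts it down to the low bits; condition code
 * 0 means the swap succeeded, hence the macro evaluates to !cc:
 * 1 on success, 0 on failure.
 */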
#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)		\
({									\
	register __typeof__(*(p1)) __old1 asm("2") = (o1);		\
	register __typeof__(*(p2)) __old2 asm("3") = (o2);		\
	register __typeof__(*(p1)) __new1 asm("4") = (n1);		\
	register __typeof__(*(p2)) __new2 asm("5") = (n2);		\
	int cc;								\
	asm volatile(							\
			insn   " %[old],%[new],%[ptr]\n"		\
		"	ipm	%[cc]\n"				\
		"	srl	%[cc],28"				\
		: [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)	\
		: [new] "d" (__new1), "d" (__new2),			\
		  [ptr] "Q" (*(p1)), "Q" (*(p2))			\
		: "memory", "cc");					\
	!cc;								\
})

#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

extern void __cmpxchg_double_called_with_bad_pointer(void);

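/*
 * __cmpxchg_double() - size dispatcher: 4-byte operands use cds,
 * 8-byte operands use cdsg.  Any other operand size ends up calling
 * __cmpxchg_double_called_with_bad_pointer(), which is declared but
 * intentionally never defined, so a bad caller is caught at link time.
 */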
#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({									\
	int __ret;							\
	switch (sizeof(*(p1))) {					\
	case 4:								\
		__ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2);	\
		break;							\
	case 8:								\
		__ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2);	\
		break;							\
	default:							\
		__cmpxchg_double_called_with_bad_pointer();		\
	}								\
	__ret;								\
})

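/*
 * cmpxchg_double() - compare-and-swap two adjacent longs as a single
 * 128-bit quantity with cdsg.  Both operands must be long sized
 * (BUILD_BUG_ON) and p2 must immediately follow p1 in memory
 * (VM_BUG_ON); the hardware additionally expects the pair to be
 * 16-byte aligned.  Returns 1 if both words were replaced, 0 if the
 * old values did not match.
 */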
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	__typeof__(p1) __p1 = (p1);					\
	__typeof__(p2) __p2 = (p2);					\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	__cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2);			\
})

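/*
 * Usage sketch (illustrative only; the structure and field names below
 * are made up for the example):
 *
 *	struct {
 *		unsigned long first;
 *		unsigned long second;
 *	} __aligned(16) pair = { .first = 1, .second = 2 };
 *
 *	if (cmpxchg_double(&pair.first, &pair.second, 1, 2, 3, 4))
 *		// both words were updated atomically by one cdsg
 */

/*
 * cds/cdsg are always available on the machines this kernel supports,
 * so the feature test is simply constant true.
 */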
#define system_has_cmpxchg_double()	1

#endif /* __ASM_CMPXCHG_H */