/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_CMPXCHG_H
#define __ASM_CSKY_CMPXCHG_H

#ifdef CONFIG_SMP
#include <linux/bug.h>
#include <asm/barrier.h>
#include <linux/cmpxchg-emu.h>

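/*
 * __xchg_relaxed() - atomically swap *ptr with @new, with no ordering
 * guarantees.
 *
 * The 32-bit case is a plain ldex.w/stex.w (load-/store-exclusive) retry
 * loop: bez loops back whenever the store-exclusive fails, which relies on
 * stex.w leaving zero in its operand register on failure.  There is no
 * exclusive halfword access, so the 16-bit case operates on the containing
 * aligned word, masking off the half being replaced and OR-ing in the new
 * value at the appropriate shift before the exclusive store.
 */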
#define __xchg_relaxed(new, ptr, size)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(*(ptr)) __ret;				\
	unsigned long tmp;					\
	switch (size) {						\
	case 2: {						\
		u32 ret;					\
		u32 shif = ((ulong)__ptr & 2) ? 16 : 0;		\
		u32 mask = 0xffff << shif;			\
		__ptr = (__typeof__(ptr))((ulong)__ptr & ~2);	\
		__asm__ __volatile__ (				\
			"1:	ldex.w %0, (%4)\n"		\
			"	and    %1, %0, %2\n"		\
			"	or     %1, %1, %3\n"		\
			"	stex.w %1, (%4)\n"		\
			"	bez    %1, 1b\n"		\
			: "=&r" (ret), "=&r" (tmp)		\
			: "r" (~mask),				\
			  "r" ((u32)__new << shif),		\
			  "r" (__ptr)				\
			: "memory");				\
		__ret = (__typeof__(*(ptr)))			\
			((ret & mask) >> shif);			\
		break;						\
	}							\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
			: "=&r" (__ret), "=&r" (tmp)		\
			: "r" (__new), "r"(__ptr)		\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

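/*
 * Only the relaxed xchg() flavour is defined here; the generic fallback
 * layer derives the acquire/release/fully-ordered forms from it.
 */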
#define arch_xchg_relaxed(ptr, x) \
		(__xchg_relaxed((x), (ptr), sizeof(*(ptr))))

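/*
 * __cmpxchg_relaxed() - compare-and-exchange with no ordering guarantees.
 *
 * One-byte values are handled by the generic cmpxchg_emu_u8() helper from
 * <linux/cmpxchg-emu.h>, which emulates a byte cmpxchg on top of a word
 * access.  The 32-bit case uses an ldex.w/stex.w loop that branches out to
 * label 2 as soon as the loaded value differs from @old; in all cases the
 * previously loaded value is returned.
 */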
#define __cmpxchg_relaxed(ptr, old, new, size)			\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 1:							\
		__ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
		break;						\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

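/* Relaxed cmpxchg() entry point picked up by the generic atomic code. */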
#define arch_cmpxchg_relaxed(ptr, o, n) \
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))

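/*
 * __cmpxchg_acquire() - compare-and-exchange with acquire semantics.
 *
 * Same loop as __cmpxchg_relaxed(), but ACQUIRE_FENCE (from
 * <asm/barrier.h>) is executed after a successful store-exclusive so that
 * later accesses cannot be reordered before the update.  A failed
 * comparison branches past the fence.
 */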
#define __cmpxchg_acquire(ptr, old, new, size)			\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 1:							\
		__ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
		break;						\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		ACQUIRE_FENCE					\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

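/* Acquire-ordered cmpxchg() entry point. */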
#define arch_cmpxchg_acquire(ptr, o, n) \
	(__cmpxchg_acquire((ptr), (o), (n), sizeof(*(ptr))))

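/*
 * __cmpxchg() - fully ordered compare-and-exchange.
 *
 * RELEASE_FENCE ahead of the ldex.w/stex.w loop orders earlier accesses
 * before the update, and FULL_FENCE after a successful store-exclusive
 * orders the update before later accesses.  One-byte values again go
 * through cmpxchg_emu_u8().
 */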
#define __cmpxchg(ptr, old, new, size)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 1:							\
		__ret = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *)__ptr, (uintptr_t)__old, (uintptr_t)__new); \
		break;						\
	case 4:							\
		asm volatile (					\
		RELEASE_FENCE					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		FULL_FENCE					\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		BUILD_BUG();					\
	}							\
	__ret;							\
})

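/*
 * arch_cmpxchg() is fully ordered.  arch_cmpxchg_local() only needs to be
 * atomic with respect to the current CPU, so the relaxed variant is
 * sufficient.
 */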
#define arch_cmpxchg(ptr, o, n)					\
	(__cmpxchg((ptr), (o), (n), sizeof(*(ptr))))

#define arch_cmpxchg_local(ptr, o, n)				\
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
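
/*
 * Illustrative use only: callers normally reach these through the generic
 * cmpxchg()/xchg() wrappers rather than the arch_* macros directly.  A
 * hypothetical bounded-increment helper (inc_below() is just a sketch, not
 * a real kernel function) might look like:
 *
 *	static bool inc_below(unsigned int *v, unsigned int limit)
 *	{
 *		unsigned int old = READ_ONCE(*v), prev;
 *
 *		while (old < limit) {
 *			prev = cmpxchg(v, old, old + 1);
 *			if (prev == old)
 *				return true;
 *			old = prev;
 *		}
 *		return false;
 *	}
 */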
#else
#include <asm-generic/cmpxchg.h>
#endif /* CONFIG_SMP */

#endif /* __ASM_CSKY_CMPXCHG_H */