xref: /linux/arch/loongarch/include/asm/atomic-amo.h (revision 9551a26f17d9445eed497bd7c639d48dfc3c0af4)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Atomic operations (AMO).
 *
 * Copyright (C) 2020-2025 Loongson Technology Corporation Limited
 */
7*79974cc3SHuacai Chen 
8*79974cc3SHuacai Chen #ifndef _ASM_ATOMIC_AMO_H
9*79974cc3SHuacai Chen #define _ASM_ATOMIC_AMO_H
10*79974cc3SHuacai Chen 
11*79974cc3SHuacai Chen #include <linux/types.h>
12*79974cc3SHuacai Chen #include <asm/barrier.h>
13*79974cc3SHuacai Chen #include <asm/cmpxchg.h>
14*79974cc3SHuacai Chen 
/*
 * ATOMIC_OP(op, I, asm_op) - generate arch_atomic_<op>(), a void
 * read-modify-write on a 32-bit atomic_t.
 *
 * The AM* instruction's result register is $zero, i.e. the old value is
 * discarded; only the memory update matters.  "I" is the value actually
 * applied (e.g. "i" for add, "-i" for sub implemented via amadd).
 * The plain (non-_db) AMO variant is used, so this is a relaxed op.
 */
#define ATOMIC_OP(op, I, asm_op)					\
static inline void arch_atomic_##op(int i, atomic_t *v)			\
{									\
	__asm__ __volatile__(						\
	"am"#asm_op".w" " $zero, %1, %0	\n"				\
	: "+ZB" (v->counter)						\
	: "r" (I)							\
	: "memory");							\
}
24*79974cc3SHuacai Chen 
/*
 * ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix) - generate
 * arch_atomic_<op>_return<suffix>(), returning the NEW value.
 *
 * The AMO instruction writes the old value of v->counter into %1
 * ("result"); the new value is then recomputed in C as (result c_op I).
 * "mb" is either "_db" (fully-ordered AMO) or empty (relaxed), and
 * "suffix" names the variant accordingly ("" or "_relaxed").
 */
#define ATOMIC_OP_RETURN(op, I, asm_op, c_op, mb, suffix)		\
static inline int arch_atomic_##op##_return##suffix(int i, atomic_t *v)	\
{									\
	int result;							\
									\
	__asm__ __volatile__(						\
	"am"#asm_op#mb".w" " %1, %2, %0		\n"			\
	: "+ZB" (v->counter), "=&r" (result)				\
	: "r" (I)							\
	: "memory");							\
									\
	return result c_op I;						\
}
38*79974cc3SHuacai Chen 
/*
 * ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix) - generate
 * arch_atomic_fetch_<op><suffix>(), returning the OLD value.
 *
 * Identical asm to ATOMIC_OP_RETURN, but the pre-op value captured in
 * "result" is returned as-is.  "mb" selects "_db" (fully ordered) or
 * the plain relaxed AMO form.
 */
#define ATOMIC_FETCH_OP(op, I, asm_op, mb, suffix)			\
static inline int arch_atomic_fetch_##op##suffix(int i, atomic_t *v)	\
{									\
	int result;							\
									\
	__asm__ __volatile__(						\
	"am"#asm_op#mb".w" " %1, %2, %0		\n"			\
	: "+ZB" (v->counter), "=&r" (result)				\
	: "r" (I)							\
	: "memory");							\
									\
	return result;							\
}
52*79974cc3SHuacai Chen 
53*79974cc3SHuacai Chen #define ATOMIC_OPS(op, I, asm_op, c_op)					\
54*79974cc3SHuacai Chen 	ATOMIC_OP(op, I, asm_op)					\
55*79974cc3SHuacai Chen 	ATOMIC_OP_RETURN(op, I, asm_op, c_op, _db,         )		\
56*79974cc3SHuacai Chen 	ATOMIC_OP_RETURN(op, I, asm_op, c_op,    , _relaxed)		\
57*79974cc3SHuacai Chen 	ATOMIC_FETCH_OP(op, I, asm_op, _db,         )			\
58*79974cc3SHuacai Chen 	ATOMIC_FETCH_OP(op, I, asm_op,    , _relaxed)
59*79974cc3SHuacai Chen 
60*79974cc3SHuacai Chen ATOMIC_OPS(add, i, add, +)
61*79974cc3SHuacai Chen ATOMIC_OPS(sub, -i, add, +)
62*79974cc3SHuacai Chen 
/*
 * Tell the generic atomic fallback machinery which variants the arch
 * provides.  _acquire and _release are mapped onto the fully-ordered
 * (_db) implementation, which is strictly stronger than required.
 */
#define arch_atomic_add_return		arch_atomic_add_return
#define arch_atomic_add_return_acquire	arch_atomic_add_return
#define arch_atomic_add_return_release	arch_atomic_add_return
#define arch_atomic_add_return_relaxed	arch_atomic_add_return_relaxed
#define arch_atomic_sub_return		arch_atomic_sub_return
#define arch_atomic_sub_return_acquire	arch_atomic_sub_return
#define arch_atomic_sub_return_release	arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed	arch_atomic_sub_return_relaxed
#define arch_atomic_fetch_add		arch_atomic_fetch_add
#define arch_atomic_fetch_add_acquire	arch_atomic_fetch_add
#define arch_atomic_fetch_add_release	arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed	arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_sub		arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire	arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release	arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed	arch_atomic_fetch_sub_relaxed

#undef ATOMIC_OPS
81*79974cc3SHuacai Chen 
82*79974cc3SHuacai Chen #define ATOMIC_OPS(op, I, asm_op)					\
83*79974cc3SHuacai Chen 	ATOMIC_OP(op, I, asm_op)					\
84*79974cc3SHuacai Chen 	ATOMIC_FETCH_OP(op, I, asm_op, _db,         )			\
85*79974cc3SHuacai Chen 	ATOMIC_FETCH_OP(op, I, asm_op,    , _relaxed)
86*79974cc3SHuacai Chen 
87*79974cc3SHuacai Chen ATOMIC_OPS(and, i, and)
88*79974cc3SHuacai Chen ATOMIC_OPS(or, i, or)
89*79974cc3SHuacai Chen ATOMIC_OPS(xor, i, xor)
90*79974cc3SHuacai Chen 
/*
 * Advertise the bitwise fetch variants to the generic fallback layer;
 * _acquire/_release again reuse the fully-ordered implementation.
 */
#define arch_atomic_fetch_and		arch_atomic_fetch_and
#define arch_atomic_fetch_and_acquire	arch_atomic_fetch_and
#define arch_atomic_fetch_and_release	arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed	arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_or		arch_atomic_fetch_or
#define arch_atomic_fetch_or_acquire	arch_atomic_fetch_or
#define arch_atomic_fetch_or_release	arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed	arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_xor		arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire	arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release	arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed	arch_atomic_fetch_xor_relaxed

/* Generator macros are local to this header. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
108*79974cc3SHuacai Chen 
109*79974cc3SHuacai Chen #ifdef CONFIG_64BIT
110*79974cc3SHuacai Chen 
111*79974cc3SHuacai Chen #define ATOMIC64_OP(op, I, asm_op)					\
112*79974cc3SHuacai Chen static inline void arch_atomic64_##op(long i, atomic64_t *v)		\
113*79974cc3SHuacai Chen {									\
114*79974cc3SHuacai Chen 	__asm__ __volatile__(						\
115*79974cc3SHuacai Chen 	"am"#asm_op".d " " $zero, %1, %0	\n"			\
116*79974cc3SHuacai Chen 	: "+ZB" (v->counter)						\
117*79974cc3SHuacai Chen 	: "r" (I)							\
118*79974cc3SHuacai Chen 	: "memory");							\
119*79974cc3SHuacai Chen }
120*79974cc3SHuacai Chen 
/*
 * ATOMIC64_OP_RETURN - generate arch_atomic64_<op>_return<suffix>(),
 * returning the NEW value: the AMO yields the old value in "result"
 * and the new value is recomputed as (result c_op I).  "mb" picks the
 * _db (fully-ordered) or plain (relaxed) instruction.
 */
#define ATOMIC64_OP_RETURN(op, I, asm_op, c_op, mb, suffix)			\
static inline long arch_atomic64_##op##_return##suffix(long i, atomic64_t *v)	\
{										\
	long result;								\
										\
	__asm__ __volatile__(							\
	"am"#asm_op#mb".d " " %1, %2, %0		\n"			\
	: "+ZB" (v->counter), "=&r" (result)					\
	: "r" (I)								\
	: "memory");								\
										\
	return result c_op I;							\
}
133*79974cc3SHuacai Chen 
/*
 * ATOMIC64_FETCH_OP - generate arch_atomic64_fetch_<op><suffix>(),
 * returning the OLD value captured from the AMO destination register.
 */
#define ATOMIC64_FETCH_OP(op, I, asm_op, mb, suffix)				\
static inline long arch_atomic64_fetch_##op##suffix(long i, atomic64_t *v)	\
{										\
	long result;								\
										\
	__asm__ __volatile__(							\
	"am"#asm_op#mb".d " " %1, %2, %0		\n"			\
	: "+ZB" (v->counter), "=&r" (result)					\
	: "r" (I)								\
	: "memory");								\
										\
	return result;								\
}
147*79974cc3SHuacai Chen 
148*79974cc3SHuacai Chen #define ATOMIC64_OPS(op, I, asm_op, c_op)				      \
149*79974cc3SHuacai Chen 	ATOMIC64_OP(op, I, asm_op)					      \
150*79974cc3SHuacai Chen 	ATOMIC64_OP_RETURN(op, I, asm_op, c_op, _db,         )		      \
151*79974cc3SHuacai Chen 	ATOMIC64_OP_RETURN(op, I, asm_op, c_op,    , _relaxed)		      \
152*79974cc3SHuacai Chen 	ATOMIC64_FETCH_OP(op, I, asm_op, _db,         )			      \
153*79974cc3SHuacai Chen 	ATOMIC64_FETCH_OP(op, I, asm_op,    , _relaxed)
154*79974cc3SHuacai Chen 
155*79974cc3SHuacai Chen ATOMIC64_OPS(add, i, add, +)
156*79974cc3SHuacai Chen ATOMIC64_OPS(sub, -i, add, +)
157*79974cc3SHuacai Chen 
/*
 * Advertise the 64-bit arithmetic variants; _acquire/_release map to
 * the fully-ordered (_db) implementation.
 */
#define arch_atomic64_add_return		arch_atomic64_add_return
#define arch_atomic64_add_return_acquire	arch_atomic64_add_return
#define arch_atomic64_add_return_release	arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed	arch_atomic64_add_return_relaxed
#define arch_atomic64_sub_return		arch_atomic64_sub_return
#define arch_atomic64_sub_return_acquire	arch_atomic64_sub_return
#define arch_atomic64_sub_return_release	arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed	arch_atomic64_sub_return_relaxed
#define arch_atomic64_fetch_add			arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire		arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release		arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed		arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_sub			arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire		arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release		arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed		arch_atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
176*79974cc3SHuacai Chen 
177*79974cc3SHuacai Chen #define ATOMIC64_OPS(op, I, asm_op)					      \
178*79974cc3SHuacai Chen 	ATOMIC64_OP(op, I, asm_op)					      \
179*79974cc3SHuacai Chen 	ATOMIC64_FETCH_OP(op, I, asm_op, _db,         )			      \
180*79974cc3SHuacai Chen 	ATOMIC64_FETCH_OP(op, I, asm_op,    , _relaxed)
181*79974cc3SHuacai Chen 
182*79974cc3SHuacai Chen ATOMIC64_OPS(and, i, and)
183*79974cc3SHuacai Chen ATOMIC64_OPS(or, i, or)
184*79974cc3SHuacai Chen ATOMIC64_OPS(xor, i, xor)
185*79974cc3SHuacai Chen 
186*79974cc3SHuacai Chen #define arch_atomic64_fetch_and		arch_atomic64_fetch_and
187*79974cc3SHuacai Chen #define arch_atomic64_fetch_and_acquire	arch_atomic64_fetch_and
188*79974cc3SHuacai Chen #define arch_atomic64_fetch_and_release	arch_atomic64_fetch_and
189*79974cc3SHuacai Chen #define arch_atomic64_fetch_and_relaxed	arch_atomic64_fetch_and_relaxed
190*79974cc3SHuacai Chen #define arch_atomic64_fetch_or		arch_atomic64_fetch_or
191*79974cc3SHuacai Chen #define arch_atomic64_fetch_or_acquire	arch_atomic64_fetch_or
192*79974cc3SHuacai Chen #define arch_atomic64_fetch_or_release	arch_atomic64_fetch_or
193*79974cc3SHuacai Chen #define arch_atomic64_fetch_or_relaxed	arch_atomic64_fetch_or_relaxed
194*79974cc3SHuacai Chen #define arch_atomic64_fetch_xor		arch_atomic64_fetch_xor
195*79974cc3SHuacai Chen #define arch_atomic64_fetch_xor_acquire	arch_atomic64_fetch_xor
196*79974cc3SHuacai Chen #define arch_atomic64_fetch_xor_release	arch_atomic64_fetch_xor
197*79974cc3SHuacai Chen #define arch_atomic64_fetch_xor_relaxed	arch_atomic64_fetch_xor_relaxed
198*79974cc3SHuacai Chen 
199*79974cc3SHuacai Chen #undef ATOMIC64_OPS
200*79974cc3SHuacai Chen #undef ATOMIC64_FETCH_OP
201*79974cc3SHuacai Chen #undef ATOMIC64_OP_RETURN
202*79974cc3SHuacai Chen #undef ATOMIC64_OP
203*79974cc3SHuacai Chen 
204*79974cc3SHuacai Chen #endif
205*79974cc3SHuacai Chen 
206*79974cc3SHuacai Chen #endif /* _ASM_ATOMIC_AMO_H */
207