/* SPDX-License-Identifier: GPL-2.0 */
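/*
 * C-SKY atomic operations, built on ldex.w/stex.w
 * (load-exclusive/store-exclusive) retry loops on SMP.
 */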

#ifndef __ASM_CSKY_ATOMIC_H
#define __ASM_CSKY_ATOMIC_H

#ifdef CONFIG_SMP
#include <asm-generic/atomic64.h>

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

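/*
 * Only the _relaxed versions of the value-returning atomics are
 * implemented below; the generic atomic fallback code builds the
 * acquire/release/full variants from them using these two fences.
 * The macros map to C-SKY's acquire (load -> load/store) and
 * release (load/store -> store) barriers.
 */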
#define __atomic_acquire_fence()	__bar_brarw()

#define __atomic_release_fence()	__bar_brwaw()

static __always_inline int arch_atomic_read(const atomic_t *v)
{
	return READ_ONCE(v->counter);
}

static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);
}

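/*
 * Non-value-returning atomics: a plain LL/SC retry loop.  ldex.w loads
 * the counter exclusively, the operation is applied in a register, and
 * stex.w attempts the exclusive store, writing a non-zero value back
 * into its source register on success and 0 on failure, so bez restarts
 * the sequence until the store succeeds.
 */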
#define ATOMIC_OP(op)							\
static __always_inline							\
void arch_atomic_##op(int i, atomic_t *v)				\
{									\
	unsigned long tmp;						\
	__asm__ __volatile__ (						\
	"1:	ldex.w		%0, (%2)	\n"			\
	"	" #op "		%0, %1		\n"			\
	"	stex.w		%0, (%2)	\n"			\
	"	bez		%0, 1b		\n"			\
	: "=&r" (tmp)							\
	: "r" (i), "r" (&v->counter)					\
	: "memory");							\
}

ATOMIC_OP(add)
ATOMIC_OP(sub)
ATOMIC_OP(and)
ATOMIC_OP( or)
ATOMIC_OP(xor)

#undef ATOMIC_OP

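/*
 * Value-returning atomics, relaxed ordering: the old value is saved in
 * 'ret' before the operation is applied, and the same ldex.w/stex.w
 * retry loop publishes the result.
 */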
#define ATOMIC_FETCH_OP(op)						\
static __always_inline							\
int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v)		\
{									\
	register int ret, tmp;						\
	__asm__ __volatile__ (						\
	"1:	ldex.w		%0, (%3)	\n"			\
	"	mov		%1, %0		\n"			\
	"	" #op "		%0, %2		\n"			\
	"	stex.w		%0, (%3)	\n"			\
	"	bez		%0, 1b		\n"			\
	: "=&r" (tmp), "=&r" (ret)					\
	: "r" (i), "r" (&v->counter)					\
	: "memory");							\
	return ret;							\
}

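/*
 * <op>_return is derived from fetch_<op> by re-applying the operation
 * to the returned old value: add_return(i, v) == fetch_add(i, v) + i.
 */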
#define ATOMIC_OP_RETURN(op, c_op)					\
static __always_inline							\
int arch_atomic_##op##_return_relaxed(int i, atomic_t *v)		\
{									\
	return arch_atomic_fetch_##op##_relaxed(i, v) c_op i;		\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_FETCH_OP(op)						\
	ATOMIC_OP_RETURN(op, c_op)

ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)

#define arch_atomic_fetch_add_relaxed	arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_sub_relaxed	arch_atomic_fetch_sub_relaxed

#define arch_atomic_add_return_relaxed	arch_atomic_add_return_relaxed
#define arch_atomic_sub_return_relaxed	arch_atomic_sub_return_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN

#define ATOMIC_OPS(op)							\
	ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS( or)
ATOMIC_OPS(xor)

#define arch_atomic_fetch_and_relaxed	arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_or_relaxed	arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_xor_relaxed	arch_atomic_fetch_xor_relaxed

#undef ATOMIC_OPS

#undef ATOMIC_FETCH_OP

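/*
 * Atomically add @a to @v unless @v == @u; returns the original value.
 * cmpne/bf exits early when the old value already equals @u.  The
 * RELEASE_FENCE before the loop and the FULL_FENCE on the success path
 * make the operation fully ordered when the add actually happens.
 */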
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int prev, tmp;

	__asm__ __volatile__ (
		RELEASE_FENCE
		"1:	ldex.w		%0, (%3)	\n"
		"	cmpne		%0, %4		\n"
		"	bf		2f		\n"
		"	mov		%1, %0		\n"
		"	add		%1, %2		\n"
		"	stex.w		%1, (%3)	\n"
		"	bez		%1, 1b		\n"
		FULL_FENCE
		"2:\n"
		: "=&r" (prev), "=&r" (tmp)
		: "r" (a), "r" (&v->counter), "r" (u)
		: "memory");

	return prev;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless

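/*
 * Increment @v unless it is negative; returns true iff the increment
 * happened.  'rc' is the result flag: cleared up front and set only
 * once the loaded value is known to be non-negative.
 */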
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int rc, tmp;

	__asm__ __volatile__ (
		RELEASE_FENCE
		"1:	ldex.w		%0, (%2)	\n"
		"	movi		%1, 0		\n"
		"	blz		%0, 2f		\n"
		"	movi		%1, 1		\n"
		"	addi		%0, 1		\n"
		"	stex.w		%0, (%2)	\n"
		"	bez		%0, 1b		\n"
		FULL_FENCE
		"2:\n"
		: "=&r" (tmp), "=&r" (rc)
		: "r" (&v->counter)
		: "memory");

	/* Return the flag, not tmp: on the early-exit path tmp holds the
	 * loaded (negative, hence non-zero) value. */
	return rc ? true : false;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative

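/*
 * Decrement @v unless it is positive (bhz exits early when the loaded
 * value is greater than zero); returns true iff the decrement happened.
 */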
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int rc, tmp;

	__asm__ __volatile__ (
		RELEASE_FENCE
		"1:	ldex.w		%0, (%2)	\n"
		"	movi		%1, 0		\n"
		"	bhz		%0, 2f		\n"
		"	movi		%1, 1		\n"
		"	subi		%0, 1		\n"
		"	stex.w		%0, (%2)	\n"
		"	bez		%0, 1b		\n"
		FULL_FENCE
		"2:\n"
		: "=&r" (tmp), "=&r" (rc)
		: "r" (&v->counter)
		: "memory");

	/* As above, the early-exit path leaves a non-zero value in tmp,
	 * so the flag in rc is the correct result. */
	return rc ? true : false;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive

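/*
 * Decrement @v if the result would stay non-negative; returns the old
 * value minus one either way, so a negative return means no store was
 * done.  stex.w overwrites 'tmp' with its success flag, hence the new
 * value is recomputed as dec - 1 from the loaded value.
 */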
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, tmp;

	__asm__ __volatile__ (
		RELEASE_FENCE
		"1:	ldex.w		%0, (%2)	\n"
		"	subi		%1, %0, 1	\n"
		"	blz		%1, 2f		\n"
		"	stex.w		%1, (%2)	\n"
		"	bez		%1, 1b		\n"
		FULL_FENCE
		"2:\n"
		: "=&r" (dec), "=&r" (tmp)
		: "r" (&v->counter)
		: "memory");

	return dec - 1;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive

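/* !CONFIG_SMP builds fall back to the generic atomic implementation. */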
#else
#include <asm-generic/atomic.h>
#endif /* CONFIG_SMP */

#endif /* __ASM_CSKY_ATOMIC_H */