xref: /freebsd/sys/arm64/include/atomic.h (revision 1f4bcc459a76b7aa664f3fd557684cd0ba6da352)
/*-
 * Copyright (c) 2013 Andrew Turner <andrew@freebsd.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#define	isb()		__asm __volatile("isb" : : : "memory")

/*
 * Options for DMB and DSB:
 *	oshld	Outer Shareable, load
 *	oshst	Outer Shareable, store
 *	osh	Outer Shareable, all
 *	nshld	Non-shareable, load
 *	nshst	Non-shareable, store
 *	nsh	Non-shareable, all
 *	ishld	Inner Shareable, load
 *	ishst	Inner Shareable, store
 *	ish	Inner Shareable, all
 *	ld	Full system, load
 *	st	Full system, store
 *	sy	Full system, all
 */
#define	dsb(opt)	__asm __volatile("dsb " __STRING(opt) : : : "memory")
#define	dmb(opt)	__asm __volatile("dmb " __STRING(opt) : : : "memory")

#define	mb()	dmb(sy)	/* Full system memory barrier all */
#define	wmb()	dmb(st)	/* Full system memory barrier store */
#define	rmb()	dmb(ld)	/* Full system memory barrier load */
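
/*
 * Example (illustrative sketch, not part of this header): pairing wmb()
 * with rmb() to publish a data/flag pair between CPUs.  The names
 * "data", "flag" and use() below are hypothetical.
 *
 *	static volatile uint32_t data, flag;
 *
 *	// Producer: make the data visible before the flag.
 *	data = 42;
 *	wmb();			// order the data store before the flag store
 *	flag = 1;
 *
 *	// Consumer: wait for the flag, then order the data load after it.
 *	while (flag == 0)
 *		;
 *	rmb();			// order the flag load before the data load
 *	use(data);
 */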

#define	ATOMIC_OP(op, asm_op, bar, a, l)				\
static __inline void							\
atomic_##op##_##bar##32(volatile uint32_t *p, uint32_t val)		\
{									\
	uint32_t tmp;							\
	int res;							\
									\
	__asm __volatile(						\
	    "1: ld"#a"xr   %w0, [%2]      \n"				\
	    "   "#asm_op"  %w0, %w0, %w3  \n"				\
	    "   st"#l"xr   %w1, %w0, [%2] \n"				\
	    "   cbnz       %w1, 1b        \n"				\
	    : "=&r"(tmp), "=&r"(res)					\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
}									\
									\
static __inline void							\
atomic_##op##_##bar##64(volatile uint64_t *p, uint64_t val)		\
{									\
	uint64_t tmp;							\
	int res;							\
									\
	__asm __volatile(						\
	    "1: ld"#a"xr   %0, [%2]      \n"				\
	    "   "#asm_op"  %0, %0, %3    \n"				\
	    "   st"#l"xr   %w1, %0, [%2] \n"				\
	    "   cbnz       %w1, 1b       \n"				\
	    : "=&r"(tmp), "=&r"(res)					\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
}

#define	ATOMIC(op, asm_op)						\
    ATOMIC_OP(op, asm_op,     ,  ,  )					\
    ATOMIC_OP(op, asm_op, acq_, a,  )					\
    ATOMIC_OP(op, asm_op, rel_,  , l)

ATOMIC(add,      add)
ATOMIC(clear,    bic)
ATOMIC(set,      orr)
ATOMIC(subtract, sub)
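
/*
 * Each ATOMIC(op, asm_op) use above expands to six functions: plain,
 * acquire (ld*axr) and release (st*lxr) variants for 32- and 64-bit
 * operands.  For example, ATOMIC(add, add) generates atomic_add_32(),
 * atomic_add_64(), atomic_add_acq_32(), atomic_add_acq_64(),
 * atomic_add_rel_32() and atomic_add_rel_64().  Illustrative sketch
 * (the variables here are hypothetical):
 *
 *	static volatile uint32_t counter, flags;
 *
 *	atomic_add_32(&counter, 1);		// relaxed read-modify-write
 *	atomic_set_acq_32(&flags, 0x1);		// acquire semantics
 *	atomic_clear_rel_32(&flags, 0x1);	// release semantics
 */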

#define	ATOMIC_CMPSET(bar, a, l)					\
static __inline int							\
atomic_cmpset_##bar##32(volatile uint32_t *p, uint32_t cmpval,		\
    uint32_t newval)							\
{									\
	uint32_t tmp;							\
	int res;							\
									\
	__asm __volatile(						\
	    "1: mov      %w1, #1        \n"				\
	    "   ld"#a"xr %w0, [%2]      \n"				\
	    "   cmp      %w0, %w3       \n"				\
	    "   b.ne     2f             \n"				\
	    "   st"#l"xr %w1, %w4, [%2] \n"				\
	    "   cbnz     %w1, 1b        \n"				\
	    "2:"							\
	    : "=&r"(tmp), "=&r"(res)					\
	    : "r" (p), "r" (cmpval), "r" (newval)			\
	    : "cc", "memory"						\
	);								\
									\
	return (!res);							\
}									\
									\
static __inline int							\
atomic_cmpset_##bar##64(volatile uint64_t *p, uint64_t cmpval,		\
    uint64_t newval)							\
{									\
	uint64_t tmp;							\
	int res;							\
									\
	__asm __volatile(						\
	    "1: mov      %w1, #1       \n"				\
	    "   ld"#a"xr %0, [%2]      \n"				\
	    "   cmp      %0, %3        \n"				\
	    "   b.ne     2f            \n"				\
	    "   st"#l"xr %w1, %4, [%2] \n"				\
	    "   cbnz     %w1, 1b       \n"				\
	    "2:"							\
	    : "=&r"(tmp), "=&r"(res)					\
	    : "r" (p), "r" (cmpval), "r" (newval)			\
	    : "cc", "memory"						\
	);								\
									\
	return (!res);							\
}

ATOMIC_CMPSET(    ,  , )
ATOMIC_CMPSET(acq_, a, )
ATOMIC_CMPSET(rel_,  , l)
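
/*
 * atomic_cmpset_*() returns non-zero on success and zero if *p did not
 * contain cmpval.  A typical lock-free retry loop looks like the
 * illustrative sketch below ("value" and compute() are hypothetical):
 *
 *	static volatile uint32_t value;
 *	uint32_t old, new;
 *
 *	do {
 *		old = value;
 *		new = compute(old);
 *	} while (atomic_cmpset_32(&value, old, new) == 0);
 */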

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp, ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%w2, [%3]      \n"
	    "   add	%w0, %w2, %w4  \n"
	    "   stxr	%w1, %w0, [%3] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(tmp), "=&r"(res), "=&r"(ret)
	    : "r" (p), "r" (val)
	    : "memory"
	);

	return (ret);
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t tmp, ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%2, [%3]      \n"
	    "   add	%0, %2, %4    \n"
	    "   stxr	%w1, %0, [%3] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(tmp), "=&r"(res), "=&r"(ret)
	    : "r" (p), "r" (val)
	    : "memory"
	);

	return (ret);
}
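
/*
 * atomic_fetchadd_*() returns the value *p held before the addition,
 * which makes it suitable for handing out unique tickets or array
 * indexes.  Illustrative sketch ("next_ticket" is hypothetical):
 *
 *	static volatile uint32_t next_ticket;
 *
 *	uint32_t my_ticket = atomic_fetchadd_32(&next_ticket, 1);
 *	// my_ticket is unique even when many CPUs run this concurrently.
 */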

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%w1, [%2]      \n"
	    "   stxr	%w0, wzr, [%2] \n"
	    "   cbnz	%w0, 1b        \n"
	    : "=&r"(res), "=&r"(ret)
	    : "r" (p)
	    : "memory"
	);

	return (ret);
}

static __inline uint64_t
atomic_readandclear_64(volatile uint64_t *p)
{
	uint64_t ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%1, [%2]       \n"
	    "   stxr	%w0, xzr, [%2] \n"
	    "   cbnz	%w0, 1b        \n"
	    : "=&r"(res), "=&r"(ret)
	    : "r" (p)
	    : "memory"
	);

	return (ret);
}

static __inline uint32_t
atomic_swap_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%w0, [%2]      \n"
	    "   stxr	%w1, %w3, [%2] \n"
	    "   cbnz	%w1, 1b        \n"
	    : "=&r"(ret), "=&r"(res)
	    : "r" (p), "r" (val)
	    : "memory"
	);

	return (ret);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t val)
{
	uint64_t ret;
	int res;

	__asm __volatile(
	    "1: ldxr	%0, [%2]      \n"
	    "   stxr	%w1, %3, [%2] \n"
	    "   cbnz	%w1, 1b       \n"
	    : "=&r"(ret), "=&r"(res)
	    : "r" (p), "r" (val)
	    : "memory"
	);

	return (ret);
}
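
/*
 * atomic_swap_*() unconditionally stores val and returns the previous
 * contents.  One classic use is a test-and-set spin flag, sketched
 * below ("busy" is hypothetical; note the plain swap carries no
 * acquire semantics, so a real lock would add the needed barrier):
 *
 *	static volatile uint32_t busy;
 *
 *	while (atomic_swap_32(&busy, 1) != 0)
 *		;				// someone else holds the flag
 *	... critical section ...
 *	atomic_store_rel_32(&busy, 0);		// release the flag
 */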

static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t ret;

	__asm __volatile(
	    "ldar	%w0, [%1] \n"
	    : "=&r" (ret)
	    : "r" (p)
	    : "memory");

	return (ret);
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{
	uint64_t ret;

	__asm __volatile(
	    "ldar	%0, [%1] \n"
	    : "=&r" (ret)
	    : "r" (p)
	    : "memory");

	return (ret);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t val)
{

	__asm __volatile(
	    "stlr	%w0, [%1] \n"
	    :
	    : "r" (val), "r" (p)
	    : "memory");
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t val)
{

	__asm __volatile(
	    "stlr	%0, [%1] \n"
	    :
	    : "r" (val), "r" (p)
	    : "memory");
}
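
/*
 * Load-acquire and store-release pair up for message passing: the
 * producer's stlr makes its earlier stores visible before the flag,
 * and the consumer's ldar keeps its later loads from moving above the
 * flag read.  Illustrative sketch ("msg", "ready" and use() are
 * hypothetical):
 *
 *	static volatile uint32_t msg, ready;
 *
 *	// Producer
 *	msg = 42;
 *	atomic_store_rel_32(&ready, 1);
 *
 *	// Consumer
 *	while (atomic_load_acq_32(&ready) == 0)
 *		;
 *	use(msg);	// guaranteed to observe msg == 42
 */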
#define	atomic_add_int			atomic_add_32
#define	atomic_clear_int		atomic_clear_32
#define	atomic_cmpset_int		atomic_cmpset_32
#define	atomic_fetchadd_int		atomic_fetchadd_32
#define	atomic_readandclear_int		atomic_readandclear_32
#define	atomic_set_int			atomic_set_32
#define	atomic_swap_int			atomic_swap_32
#define	atomic_subtract_int		atomic_subtract_32

#define	atomic_add_acq_int		atomic_add_acq_32
#define	atomic_clear_acq_int		atomic_clear_acq_32
#define	atomic_cmpset_acq_int		atomic_cmpset_acq_32
#define	atomic_load_acq_int		atomic_load_acq_32
#define	atomic_set_acq_int		atomic_set_acq_32
#define	atomic_subtract_acq_int		atomic_subtract_acq_32

#define	atomic_add_rel_int		atomic_add_rel_32
#define	atomic_clear_rel_int		atomic_clear_rel_32
#define	atomic_cmpset_rel_int		atomic_cmpset_rel_32
#define	atomic_set_rel_int		atomic_set_rel_32
#define	atomic_subtract_rel_int		atomic_subtract_rel_32
#define	atomic_store_rel_int		atomic_store_rel_32

#define	atomic_add_long			atomic_add_64
#define	atomic_clear_long		atomic_clear_64
#define	atomic_cmpset_long		atomic_cmpset_64
#define	atomic_fetchadd_long		atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64
#define	atomic_set_long			atomic_set_64
#define	atomic_swap_long		atomic_swap_64
#define	atomic_subtract_long		atomic_subtract_64

#define	atomic_add_ptr			atomic_add_64
#define	atomic_clear_ptr		atomic_clear_64
#define	atomic_cmpset_ptr		atomic_cmpset_64
#define	atomic_fetchadd_ptr		atomic_fetchadd_64
#define	atomic_readandclear_ptr		atomic_readandclear_64
#define	atomic_set_ptr			atomic_set_64
#define	atomic_swap_ptr			atomic_swap_64
#define	atomic_subtract_ptr		atomic_subtract_64

#define	atomic_add_acq_long		atomic_add_acq_64
#define	atomic_clear_acq_long		atomic_clear_acq_64
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_64
#define	atomic_load_acq_long		atomic_load_acq_64
#define	atomic_set_acq_long		atomic_set_acq_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64

#define	atomic_add_acq_ptr		atomic_add_acq_64
#define	atomic_clear_acq_ptr		atomic_clear_acq_64
#define	atomic_cmpset_acq_ptr		atomic_cmpset_acq_64
#define	atomic_load_acq_ptr		atomic_load_acq_64
#define	atomic_set_acq_ptr		atomic_set_acq_64
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_64

#define	atomic_add_rel_long		atomic_add_rel_64
#define	atomic_clear_rel_long		atomic_clear_rel_64
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_64
#define	atomic_set_rel_long		atomic_set_rel_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_store_rel_long		atomic_store_rel_64

#define	atomic_add_rel_ptr		atomic_add_rel_64
#define	atomic_clear_rel_ptr		atomic_clear_rel_64
#define	atomic_cmpset_rel_ptr		atomic_cmpset_rel_64
#define	atomic_set_rel_ptr		atomic_set_rel_64
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_64
#define	atomic_store_rel_ptr		atomic_store_rel_64

static __inline void
atomic_thread_fence_acq(void)
{

	dmb(ld);
}

static __inline void
atomic_thread_fence_rel(void)
{

	dmb(sy);
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	dmb(sy);
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	dmb(sy);
}
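
/*
 * The fences order this CPU's prior accesses against its later ones
 * without naming a particular variable.  A hedged sketch of the
 * seq_cst fence providing store-load ordering, Dekker-style ("flag0"
 * and "flag1" are hypothetical):
 *
 *	atomic_store_rel_32(&flag0, 1);
 *	atomic_thread_fence_seq_cst();	// order the store above before...
 *	if (atomic_load_acq_32(&flag1) == 0)	// ...this load
 *		enter_critical_section();	// hypothetical
 */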

#endif /* _MACHINE_ATOMIC_H_ */