/*-
 * Copyright (c) 2013 Andrew Turner <andrew@freebsd.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifdef __arm__
#include <arm/atomic.h>
#else /* !__arm__ */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

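/*
 * Instruction Synchronization Barrier: flushes the pipeline so that
 * instructions after the isb are fetched only once it completes.
 */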
#define	isb()		__asm __volatile("isb" : : : "memory")

/*
 * Options for DMB and DSB:
 *	oshld	Outer Shareable, load
 *	oshst	Outer Shareable, store
 *	osh	Outer Shareable, all
 *	nshld	Non-shareable, load
 *	nshst	Non-shareable, store
 *	nsh	Non-shareable, all
 *	ishld	Inner Shareable, load
 *	ishst	Inner Shareable, store
 *	ish	Inner Shareable, all
 *	ld	Full system, load
 *	st	Full system, store
 *	sy	Full system, all
 */
#define	dsb(opt)	__asm __volatile("dsb " __STRING(opt) : : : "memory")
#define	dmb(opt)	__asm __volatile("dmb " __STRING(opt) : : : "memory")
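
/*
 * Example: dmb(ishst) orders earlier stores before later stores within
 * the Inner Shareable domain (the CPUs), which is typically cheaper than
 * a full-system dmb(sy) when only store-store ordering is required.
 */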

#define	mb()	dmb(sy)	/* Full system memory barrier all */
#define	wmb()	dmb(st)	/* Full system memory barrier store */
#define	rmb()	dmb(ld)	/* Full system memory barrier load */

#ifdef _KERNEL
extern _Bool lse_supported;
#endif

#if defined(SAN_NEEDS_INTERCEPTORS) && !defined(SAN_RUNTIME)
#include <sys/atomic_san.h>
#else

#include <sys/atomic_common.h>

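/*
 * Select between LL/SC (load/store-exclusive) and LSE (ARMv8.1 Large
 * System Extensions) implementations.  LSE is used unconditionally when
 * the compiler targets it (__ARM_FEATURE_ATOMICS) or the kernel is built
 * with the LSE_ATOMICS option; otherwise the kernel dispatches at runtime
 * on lse_supported, and userspace falls back to LL/SC.
 */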
#if defined(__ARM_FEATURE_ATOMICS)
#define	_ATOMIC_LSE_SUPPORTED	1
#elif defined(_KERNEL)
#ifdef LSE_ATOMICS
#define	_ATOMIC_LSE_SUPPORTED	1
#else
#define	_ATOMIC_LSE_SUPPORTED	lse_supported
#endif
#else
#define	_ATOMIC_LSE_SUPPORTED	0
#endif

#define	_ATOMIC_OP_PROTO(t, op, bar, flav)				\
static __inline void							\
atomic_##op##_##bar##t##flav(volatile uint##t##_t *p, uint##t##_t val)

#define	_ATOMIC_OP_IMPL(t, w, s, op, llsc_asm_op, lse_asm_op, pre, bar, a, l) \
_ATOMIC_OP_PROTO(t, op, bar, _llsc)					\
{									\
	uint##t##_t tmp;						\
	int res;							\
									\
	pre;								\
	__asm __volatile(						\
	    "1: ld"#a"xr"#s"	%"#w"0, [%2]\n"				\
	    "   "#llsc_asm_op"	%"#w"0, %"#w"0, %"#w"3\n"		\
	    "   st"#l"xr"#s"	%w1, %"#w"0, [%2]\n"			\
	    "   cbnz		%w1, 1b\n"				\
	    : "=&r"(tmp), "=&r"(res)					\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
}									\
									\
_ATOMIC_OP_PROTO(t, op, bar, _lse)					\
{									\
	uint##t##_t tmp;						\
									\
	pre;								\
	__asm __volatile(						\
	    ".arch_extension lse\n"					\
	    "ld"#lse_asm_op#a#l#s"	%"#w"2, %"#w"0, [%1]\n"		\
	    ".arch_extension nolse\n"					\
	    : "=r" (tmp)						\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
}									\
									\
_ATOMIC_OP_PROTO(t, op, bar, )						\
{									\
	if (_ATOMIC_LSE_SUPPORTED)					\
		atomic_##op##_##bar##t##_lse(p, val);			\
	else								\
		atomic_##op##_##bar##t##_llsc(p, val);			\
}

#define	__ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, bar, a, l)	\
	_ATOMIC_OP_IMPL(8,  w, b, op, llsc_asm_op, lse_asm_op, pre,	\
	    bar, a, l)							\
	_ATOMIC_OP_IMPL(16, w, h, op, llsc_asm_op, lse_asm_op, pre,	\
	    bar, a, l)							\
	_ATOMIC_OP_IMPL(32, w,  , op, llsc_asm_op, lse_asm_op, pre,	\
	    bar, a, l)							\
	_ATOMIC_OP_IMPL(64,  ,  , op, llsc_asm_op, lse_asm_op, pre,	\
	    bar, a, l)

#define	_ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre)			\
	__ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre,     ,  ,  )	\
	__ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, acq_, a,  )	\
	__ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, rel_,  , l)

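/*
 * Instantiate atomic_<op>_<8,16,32,64> and their _acq_/_rel_ flavors for
 * each operation below.  LSE has no subtract primitive, so subtract
 * negates its operand via the "pre" hook and reuses the add paths.
 */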
_ATOMIC_OP(add,      add, add, )
_ATOMIC_OP(clear,    bic, clr, )
_ATOMIC_OP(set,      orr, set, )
_ATOMIC_OP(subtract, add, add, val = -val)
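
/*
 * Illustrative (not generated verbatim) expansion of the 32-bit LL/SC
 * add produced by _ATOMIC_OP(add, add, add, ) above:
 *
 *	static __inline void
 *	atomic_add_32_llsc(volatile uint32_t *p, uint32_t val)
 *	{
 *		uint32_t tmp;
 *		int res;
 *
 *		__asm __volatile(
 *		    "1: ldxr	%w0, [%2]\n"
 *		    "   add	%w0, %w0, %w3\n"
 *		    "   stxr	%w1, %w0, [%2]\n"
 *		    "   cbnz	%w1, 1b\n"
 *		    : "=&r"(tmp), "=&r"(res)
 *		    : "r" (p), "r" (val)
 *		    : "memory");
 *	}
 *
 * The loop retries until the store-exclusive succeeds, i.e. until no
 * other CPU wrote the location between the ldxr and the stxr;
 * atomic_add_32() dispatches to this or the LSE form at runtime.
 */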

#define	_ATOMIC_CMPSET_PROTO(t, bar, flav)				\
static __inline int							\
atomic_cmpset_##bar##t##flav(volatile uint##t##_t *p,			\
    uint##t##_t cmpval, uint##t##_t newval)

#define	_ATOMIC_FCMPSET_PROTO(t, bar, flav)				\
static __inline int							\
atomic_fcmpset_##bar##t##flav(volatile uint##t##_t *p,			\
    uint##t##_t *cmpval, uint##t##_t newval)

#define	_ATOMIC_CMPSET_IMPL(t, w, s, bar, a, l)				\
_ATOMIC_CMPSET_PROTO(t, bar, _llsc)					\
{									\
	uint##t##_t tmp;						\
	int res;							\
									\
	__asm __volatile(						\
	    "1: mov		%w1, #1\n"				\
	    "   ld"#a"xr"#s"	%"#w"0, [%2]\n"				\
	    "   cmp		%"#w"0, %"#w"3\n"			\
	    "   b.ne		2f\n"					\
	    "   st"#l"xr"#s"	%w1, %"#w"4, [%2]\n"			\
	    "   cbnz		%w1, 1b\n"				\
	    "2:"							\
	    : "=&r"(tmp), "=&r"(res)					\
	    : "r" (p), "r" (cmpval), "r" (newval)			\
	    : "cc", "memory"						\
	);								\
									\
	return (!res);							\
}									\
									\
_ATOMIC_CMPSET_PROTO(t, bar, _lse)					\
{									\
	uint##t##_t oldval;						\
	int res;							\
									\
	oldval = cmpval;						\
	__asm __volatile(						\
	    ".arch_extension lse\n"					\
	    "cas"#a#l#s"	%"#w"1, %"#w"4, [%3]\n"			\
	    "cmp		%"#w"1, %"#w"2\n"			\
	    "cset		%w0, eq\n"				\
	    ".arch_extension nolse\n"					\
	    : "=r" (res), "+&r" (cmpval)				\
	    : "r" (oldval), "r" (p), "r" (newval)			\
	    : "cc", "memory"						\
	);								\
									\
	return (res);							\
}									\
									\
_ATOMIC_CMPSET_PROTO(t, bar, )						\
{									\
	if (_ATOMIC_LSE_SUPPORTED)					\
		return (atomic_cmpset_##bar##t##_lse(p, cmpval,		\
		    newval));						\
	else								\
		return (atomic_cmpset_##bar##t##_llsc(p, cmpval,	\
		    newval));						\
}									\
									\
_ATOMIC_FCMPSET_PROTO(t, bar, _llsc)					\
{									\
	uint##t##_t _cmpval, tmp;					\
	int res;							\
									\
	_cmpval = *cmpval;						\
	__asm __volatile(						\
	    "   mov		%w1, #1\n"				\
	    "   ld"#a"xr"#s"	%"#w"0, [%2]\n"				\
	    "   cmp		%"#w"0, %"#w"3\n"			\
	    "   b.ne		1f\n"					\
	    "   st"#l"xr"#s"	%w1, %"#w"4, [%2]\n"			\
	    "1:"							\
	    : "=&r"(tmp), "=&r"(res)					\
	    : "r" (p), "r" (_cmpval), "r" (newval)			\
	    : "cc", "memory"						\
	);								\
	*cmpval = tmp;							\
									\
	return (!res);							\
}									\
									\
_ATOMIC_FCMPSET_PROTO(t, bar, _lse)					\
{									\
	uint##t##_t _cmpval, tmp;					\
	int res;							\
									\
	_cmpval = tmp = *cmpval;					\
	__asm __volatile(						\
	    ".arch_extension lse\n"					\
	    "cas"#a#l#s"	%"#w"1, %"#w"4, [%3]\n"			\
	    "cmp		%"#w"1, %"#w"2\n"			\
	    "cset		%w0, eq\n"				\
	    ".arch_extension nolse\n"					\
	    : "=r" (res), "+&r" (tmp)					\
	    : "r" (_cmpval), "r" (p), "r" (newval)			\
	    : "cc", "memory"						\
	);								\
	*cmpval = tmp;							\
									\
	return (res);							\
}									\
									\
_ATOMIC_FCMPSET_PROTO(t, bar, )						\
{									\
	if (_ATOMIC_LSE_SUPPORTED)					\
		return (atomic_fcmpset_##bar##t##_lse(p, cmpval,	\
		    newval));						\
	else								\
		return (atomic_fcmpset_##bar##t##_llsc(p, cmpval,	\
		    newval));						\
}
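
/*
 * atomic_cmpset returns nonzero iff *p matched cmpval and was replaced
 * with newval.  atomic_fcmpset additionally writes the observed value
 * back through cmpval on failure, saving a reload in retry loops, e.g.
 * (sketch, not part of this file):
 *
 *	uint32_t v = atomic_load_acq_32(&x);
 *	while (atomic_fcmpset_32(&x, &v, v + 1) == 0)
 *		;	(v now holds the value observed; retry)
 *
 * The self-referential defines below advertise the 8- and 16-bit
 * variants to <sys/atomic.h>.
 */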

#define	_ATOMIC_CMPSET(bar, a, l)					\
	_ATOMIC_CMPSET_IMPL(8,  w, b, bar, a, l)			\
	_ATOMIC_CMPSET_IMPL(16, w, h, bar, a, l)			\
	_ATOMIC_CMPSET_IMPL(32, w,  , bar, a, l)			\
	_ATOMIC_CMPSET_IMPL(64,  ,  , bar, a, l)

#define	atomic_cmpset_8		atomic_cmpset_8
#define	atomic_fcmpset_8	atomic_fcmpset_8
#define	atomic_cmpset_16	atomic_cmpset_16
#define	atomic_fcmpset_16	atomic_fcmpset_16

_ATOMIC_CMPSET(    ,  , )
_ATOMIC_CMPSET(acq_, a, )
_ATOMIC_CMPSET(rel_,  , l)

#define	_ATOMIC_FETCHADD_PROTO(t, flav)					\
static __inline uint##t##_t						\
atomic_fetchadd_##t##flav(volatile uint##t##_t *p, uint##t##_t val)

#define	_ATOMIC_FETCHADD_IMPL(t, w)					\
_ATOMIC_FETCHADD_PROTO(t, _llsc)					\
{									\
	uint##t##_t ret, tmp;						\
	int res;							\
									\
	__asm __volatile(						\
	    "1: ldxr	%"#w"2, [%3]\n"					\
	    "   add	%"#w"0, %"#w"2, %"#w"4\n"			\
	    "   stxr	%w1, %"#w"0, [%3]\n"				\
	    "   cbnz	%w1, 1b\n"					\
	    : "=&r" (tmp), "=&r" (res), "=&r" (ret)			\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
									\
	return (ret);							\
}									\
									\
_ATOMIC_FETCHADD_PROTO(t, _lse)						\
{									\
	uint##t##_t ret;						\
									\
	__asm __volatile(						\
	    ".arch_extension lse\n"					\
	    "ldadd	%"#w"2, %"#w"0, [%1]\n"				\
	    ".arch_extension nolse\n"					\
	    : "=r" (ret)						\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
									\
	return (ret);							\
}									\
									\
_ATOMIC_FETCHADD_PROTO(t, )						\
{									\
	if (_ATOMIC_LSE_SUPPORTED)					\
		return (atomic_fetchadd_##t##_lse(p, val));		\
	else								\
		return (atomic_fetchadd_##t##_llsc(p, val));		\
}

_ATOMIC_FETCHADD_IMPL(32, w)
_ATOMIC_FETCHADD_IMPL(64,  )
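
/*
 * atomic_fetchadd returns the value *p held before the addition, e.g.
 * "old = atomic_fetchadd_32(&cnt, 1)" yields the pre-increment count;
 * the LSE ldadd instruction returns it directly.
 */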

#define	_ATOMIC_SWAP_PROTO(t, flav)					\
static __inline uint##t##_t						\
atomic_swap_##t##flav(volatile uint##t##_t *p, uint##t##_t val)

#define	_ATOMIC_READANDCLEAR_PROTO(t, flav)				\
static __inline uint##t##_t						\
atomic_readandclear_##t##flav(volatile uint##t##_t *p)

#define	_ATOMIC_SWAP_IMPL(t, w, zreg)					\
_ATOMIC_SWAP_PROTO(t, _llsc)						\
{									\
	uint##t##_t ret;						\
	int res;							\
									\
	__asm __volatile(						\
	    "1: ldxr	%"#w"1, [%2]\n"					\
	    "   stxr	%w0, %"#w"3, [%2]\n"				\
	    "   cbnz	%w0, 1b\n"					\
	    : "=&r" (res), "=&r" (ret)					\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
									\
	return (ret);							\
}									\
									\
_ATOMIC_SWAP_PROTO(t, _lse)						\
{									\
	uint##t##_t ret;						\
									\
	__asm __volatile(						\
	    ".arch_extension lse\n"					\
	    "swp	%"#w"2, %"#w"0, [%1]\n"				\
	    ".arch_extension nolse\n"					\
	    : "=r" (ret)						\
	    : "r" (p), "r" (val)					\
	    : "memory"							\
	);								\
									\
	return (ret);							\
}									\
									\
_ATOMIC_SWAP_PROTO(t, )							\
{									\
	if (_ATOMIC_LSE_SUPPORTED)					\
		return (atomic_swap_##t##_lse(p, val));			\
	else								\
		return (atomic_swap_##t##_llsc(p, val));		\
}									\
									\
_ATOMIC_READANDCLEAR_PROTO(t, _llsc)					\
{									\
	uint##t##_t ret;						\
	int res;							\
									\
	__asm __volatile(						\
	    "1: ldxr	%"#w"1, [%2]\n"					\
	    "   stxr	%w0, "#zreg", [%2]\n"				\
	    "   cbnz	%w0, 1b\n"					\
	    : "=&r" (res), "=&r" (ret)					\
	    : "r" (p)							\
	    : "memory"							\
	);								\
									\
	return (ret);							\
}									\
									\
_ATOMIC_READANDCLEAR_PROTO(t, _lse)					\
{									\
	return (atomic_swap_##t##_lse(p, 0));				\
}									\
									\
_ATOMIC_READANDCLEAR_PROTO(t, )						\
{									\
	if (_ATOMIC_LSE_SUPPORTED)					\
		return (atomic_readandclear_##t##_lse(p));		\
	else								\
		return (atomic_readandclear_##t##_llsc(p));		\
}

_ATOMIC_SWAP_IMPL(32, w, wzr)
_ATOMIC_SWAP_IMPL(64,  , xzr)
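
/*
 * readandclear is swap with zero: the LL/SC form stores the zero
 * register (wzr/xzr, passed as zreg) and the LSE form reuses
 * atomic_swap_<t>_lse(p, 0).
 */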

#define	_ATOMIC_TEST_OP_PROTO(t, op, bar, flav)				\
static __inline int							\
atomic_testand##op##_##bar##t##flav(volatile uint##t##_t *p, u_int val)

#define	_ATOMIC_TEST_OP_IMPL(t, w, op, llsc_asm_op, lse_asm_op, bar, a)	\
_ATOMIC_TEST_OP_PROTO(t, op, bar, _llsc)				\
{									\
	uint##t##_t mask, old, tmp;					\
	int res;							\
									\
	mask = ((uint##t##_t)1) << (val & (t - 1));			\
	__asm __volatile(						\
	    "1: ld"#a"xr	%"#w"2, [%3]\n"				\
	    "  "#llsc_asm_op"	%"#w"0, %"#w"2, %"#w"4\n"		\
	    "   stxr		%w1, %"#w"0, [%3]\n"			\
	    "   cbnz		%w1, 1b\n"				\
	    : "=&r" (tmp), "=&r" (res), "=&r" (old)			\
	    : "r" (p), "r" (mask)					\
	    : "memory"							\
	);								\
									\
	return ((old & mask) != 0);					\
}									\
									\
_ATOMIC_TEST_OP_PROTO(t, op, bar, _lse)					\
{									\
	uint##t##_t mask, old;						\
									\
	mask = ((uint##t##_t)1) << (val & (t - 1));			\
	__asm __volatile(						\
	    ".arch_extension lse\n"					\
	    "ld"#lse_asm_op#a"	%"#w"2, %"#w"0, [%1]\n"			\
	    ".arch_extension nolse\n"					\
	    : "=r" (old)						\
	    : "r" (p), "r" (mask)					\
	    : "memory"							\
	);								\
									\
	return ((old & mask) != 0);					\
}									\
									\
_ATOMIC_TEST_OP_PROTO(t, op, bar, )					\
{									\
	if (_ATOMIC_LSE_SUPPORTED)					\
		return (atomic_testand##op##_##bar##t##_lse(p, val));	\
	else								\
		return (atomic_testand##op##_##bar##t##_llsc(p, val));	\
}

#define	_ATOMIC_TEST_OP(op, llsc_asm_op, lse_asm_op)			\
	_ATOMIC_TEST_OP_IMPL(32, w, op, llsc_asm_op, lse_asm_op,     ,  ) \
	_ATOMIC_TEST_OP_IMPL(32, w, op, llsc_asm_op, lse_asm_op, acq_, a) \
	_ATOMIC_TEST_OP_IMPL(64,  , op, llsc_asm_op, lse_asm_op,     ,  ) \
	_ATOMIC_TEST_OP_IMPL(64,  , op, llsc_asm_op, lse_asm_op, acq_, a)

_ATOMIC_TEST_OP(clear, bic, clr)
_ATOMIC_TEST_OP(set,   orr, set)
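
/*
 * atomic_testandset_32(p, bit) atomically sets bit (bit % 32) of *p and
 * returns nonzero iff it was already set; testandclear is the analogous
 * bic/clr form.  The "val & (t - 1)" reduces the bit index modulo the
 * operand width, so out-of-range indices wrap rather than fault.
 */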

#define	_ATOMIC_LOAD_ACQ_IMPL(t, w, s)					\
static __inline uint##t##_t						\
atomic_load_acq_##t(volatile uint##t##_t *p)				\
{									\
	uint##t##_t ret;						\
									\
	__asm __volatile(						\
	    "ldar"#s"	%"#w"0, [%1]\n"					\
	    : "=&r" (ret)						\
	    : "r" (p)							\
	    : "memory");						\
									\
	return (ret);							\
}

#define	atomic_load_acq_8	atomic_load_acq_8
#define	atomic_load_acq_16	atomic_load_acq_16
_ATOMIC_LOAD_ACQ_IMPL(8,  w, b)
_ATOMIC_LOAD_ACQ_IMPL(16, w, h)
_ATOMIC_LOAD_ACQ_IMPL(32, w,  )
_ATOMIC_LOAD_ACQ_IMPL(64,  ,  )

#define	_ATOMIC_STORE_REL_IMPL(t, w, s)					\
static __inline void							\
atomic_store_rel_##t(volatile uint##t##_t *p, uint##t##_t val)		\
{									\
	__asm __volatile(						\
	    "stlr"#s"	%"#w"0, [%1]\n"					\
	    :								\
	    : "r" (val), "r" (p)					\
	    : "memory");						\
}

_ATOMIC_STORE_REL_IMPL(8,  w, b)
_ATOMIC_STORE_REL_IMPL(16, w, h)
_ATOMIC_STORE_REL_IMPL(32, w,  )
_ATOMIC_STORE_REL_IMPL(64,  ,  )
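
/*
 * Acquire loads and release stores map directly onto the load-acquire
 * (ldar/ldarb/ldarh) and store-release (stlr/stlrb/stlrh) instructions;
 * no separate barrier is needed.
 */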

#define	atomic_add_char			atomic_add_8
#define	atomic_fcmpset_char		atomic_fcmpset_8
#define	atomic_clear_char		atomic_clear_8
#define	atomic_cmpset_char		atomic_cmpset_8
#define	atomic_fetchadd_char		atomic_fetchadd_8
#define	atomic_readandclear_char	atomic_readandclear_8
#define	atomic_set_char			atomic_set_8
#define	atomic_swap_char		atomic_swap_8
#define	atomic_subtract_char		atomic_subtract_8
#define	atomic_testandclear_char	atomic_testandclear_8
#define	atomic_testandset_char		atomic_testandset_8

#define	atomic_add_acq_char		atomic_add_acq_8
#define	atomic_fcmpset_acq_char		atomic_fcmpset_acq_8
#define	atomic_clear_acq_char		atomic_clear_acq_8
#define	atomic_cmpset_acq_char		atomic_cmpset_acq_8
#define	atomic_load_acq_char		atomic_load_acq_8
#define	atomic_set_acq_char		atomic_set_acq_8
#define	atomic_subtract_acq_char	atomic_subtract_acq_8
#define	atomic_testandset_acq_char	atomic_testandset_acq_8

#define	atomic_add_rel_char		atomic_add_rel_8
#define	atomic_fcmpset_rel_char		atomic_fcmpset_rel_8
#define	atomic_clear_rel_char		atomic_clear_rel_8
#define	atomic_cmpset_rel_char		atomic_cmpset_rel_8
#define	atomic_set_rel_char		atomic_set_rel_8
#define	atomic_subtract_rel_char	atomic_subtract_rel_8
#define	atomic_store_rel_char		atomic_store_rel_8

#define	atomic_add_short		atomic_add_16
#define	atomic_fcmpset_short		atomic_fcmpset_16
#define	atomic_clear_short		atomic_clear_16
#define	atomic_cmpset_short		atomic_cmpset_16
#define	atomic_fetchadd_short		atomic_fetchadd_16
#define	atomic_readandclear_short	atomic_readandclear_16
#define	atomic_set_short		atomic_set_16
#define	atomic_swap_short		atomic_swap_16
#define	atomic_subtract_short		atomic_subtract_16
#define	atomic_testandclear_short	atomic_testandclear_16
#define	atomic_testandset_short		atomic_testandset_16

#define	atomic_add_acq_short		atomic_add_acq_16
#define	atomic_fcmpset_acq_short	atomic_fcmpset_acq_16
#define	atomic_clear_acq_short		atomic_clear_acq_16
#define	atomic_cmpset_acq_short		atomic_cmpset_acq_16
#define	atomic_load_acq_short		atomic_load_acq_16
#define	atomic_set_acq_short		atomic_set_acq_16
#define	atomic_subtract_acq_short	atomic_subtract_acq_16
#define	atomic_testandset_acq_short	atomic_testandset_acq_16

#define	atomic_add_rel_short		atomic_add_rel_16
#define	atomic_fcmpset_rel_short	atomic_fcmpset_rel_16
#define	atomic_clear_rel_short		atomic_clear_rel_16
#define	atomic_cmpset_rel_short		atomic_cmpset_rel_16
#define	atomic_set_rel_short		atomic_set_rel_16
#define	atomic_subtract_rel_short	atomic_subtract_rel_16
#define	atomic_store_rel_short		atomic_store_rel_16

#define	atomic_add_int			atomic_add_32
#define	atomic_fcmpset_int		atomic_fcmpset_32
#define	atomic_clear_int		atomic_clear_32
#define	atomic_cmpset_int		atomic_cmpset_32
#define	atomic_fetchadd_int		atomic_fetchadd_32
#define	atomic_readandclear_int		atomic_readandclear_32
#define	atomic_set_int			atomic_set_32
#define	atomic_swap_int			atomic_swap_32
#define	atomic_subtract_int		atomic_subtract_32
#define	atomic_testandclear_int		atomic_testandclear_32
#define	atomic_testandset_int		atomic_testandset_32

#define	atomic_add_acq_int		atomic_add_acq_32
#define	atomic_fcmpset_acq_int		atomic_fcmpset_acq_32
#define	atomic_clear_acq_int		atomic_clear_acq_32
#define	atomic_cmpset_acq_int		atomic_cmpset_acq_32
#define	atomic_load_acq_int		atomic_load_acq_32
#define	atomic_set_acq_int		atomic_set_acq_32
#define	atomic_subtract_acq_int		atomic_subtract_acq_32
#define	atomic_testandset_acq_int	atomic_testandset_acq_32

#define	atomic_add_rel_int		atomic_add_rel_32
#define	atomic_fcmpset_rel_int		atomic_fcmpset_rel_32
#define	atomic_clear_rel_int		atomic_clear_rel_32
#define	atomic_cmpset_rel_int		atomic_cmpset_rel_32
#define	atomic_set_rel_int		atomic_set_rel_32
#define	atomic_subtract_rel_int		atomic_subtract_rel_32
#define	atomic_store_rel_int		atomic_store_rel_32

#define	atomic_add_long			atomic_add_64
#define	atomic_fcmpset_long		atomic_fcmpset_64
#define	atomic_clear_long		atomic_clear_64
#define	atomic_cmpset_long		atomic_cmpset_64
#define	atomic_fetchadd_long		atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64
#define	atomic_set_long			atomic_set_64
#define	atomic_swap_long		atomic_swap_64
#define	atomic_subtract_long		atomic_subtract_64
#define	atomic_testandclear_long	atomic_testandclear_64
#define	atomic_testandset_long		atomic_testandset_64

#define	atomic_add_ptr			atomic_add_64
#define	atomic_fcmpset_ptr		atomic_fcmpset_64
#define	atomic_clear_ptr		atomic_clear_64
#define	atomic_cmpset_ptr		atomic_cmpset_64
#define	atomic_fetchadd_ptr		atomic_fetchadd_64
#define	atomic_readandclear_ptr		atomic_readandclear_64
#define	atomic_set_ptr			atomic_set_64
#define	atomic_swap_ptr			atomic_swap_64
#define	atomic_subtract_ptr		atomic_subtract_64
#define	atomic_testandclear_ptr		atomic_testandclear_64
#define	atomic_testandset_ptr		atomic_testandset_64

#define	atomic_add_acq_long		atomic_add_acq_64
#define	atomic_fcmpset_acq_long		atomic_fcmpset_acq_64
#define	atomic_clear_acq_long		atomic_clear_acq_64
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_64
#define	atomic_load_acq_long		atomic_load_acq_64
#define	atomic_set_acq_long		atomic_set_acq_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64
#define	atomic_testandset_acq_long	atomic_testandset_acq_64

#define	atomic_add_acq_ptr		atomic_add_acq_64
#define	atomic_fcmpset_acq_ptr		atomic_fcmpset_acq_64
#define	atomic_clear_acq_ptr		atomic_clear_acq_64
#define	atomic_cmpset_acq_ptr		atomic_cmpset_acq_64
#define	atomic_load_acq_ptr		atomic_load_acq_64
#define	atomic_set_acq_ptr		atomic_set_acq_64
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_64

#define	atomic_add_rel_long		atomic_add_rel_64
#define	atomic_fcmpset_rel_long		atomic_fcmpset_rel_64
#define	atomic_clear_rel_long		atomic_clear_rel_64
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_64
#define	atomic_set_rel_long		atomic_set_rel_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_store_rel_long		atomic_store_rel_64

#define	atomic_add_rel_ptr		atomic_add_rel_64
#define	atomic_fcmpset_rel_ptr		atomic_fcmpset_rel_64
#define	atomic_clear_rel_ptr		atomic_clear_rel_64
#define	atomic_cmpset_rel_ptr		atomic_cmpset_rel_64
#define	atomic_set_rel_ptr		atomic_set_rel_64
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_64
#define	atomic_store_rel_ptr		atomic_store_rel_64

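/*
 * Thread fences: acquire maps to dmb(ld).  Release and acq_rel use
 * dmb(sy) rather than dmb(st) because a release fence must also order
 * earlier loads before later stores, which a store-store barrier does
 * not guarantee.
 */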
static __inline void
atomic_thread_fence_acq(void)
{

	dmb(ld);
}

static __inline void
atomic_thread_fence_rel(void)
{

	dmb(sy);
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	dmb(sy);
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	dmb(sy);
}

#endif /* defined(SAN_NEEDS_INTERCEPTORS) && !defined(SAN_RUNTIME) */
#endif /* _MACHINE_ATOMIC_H_ */

#endif /* !__arm__ */