/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#include <sys/atomic_common.h>

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x80

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
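
/*
 * Illustrative example (hypothetical code, not part of the interface):
 * a reference count maintained with the primitives above.
 *
 *	static u_int refcnt;
 *
 *	atomic_add_int(&refcnt, 1);		take a reference
 *	atomic_subtract_int(&refcnt, 1);	drop a reference
 *	old = atomic_readandclear_int(&refcnt);	claim all references
 */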

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are always generated, even when the kernel
 * is built for UP (see below).
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
int	atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int	atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
int	atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
	    u_short src);
int	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_fcmpset_64(volatile uint64_t *, uint64_t *, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_fetchadd_64(volatile uint64_t *, uint64_t);
void		atomic_add_64(volatile uint64_t *, uint64_t);
void		atomic_subtract_64(volatile uint64_t *, uint64_t);

#else /* !__GNUCLIKE_ASM */

/*
 * Always use lock prefixes.  The result is slightly less optimal for
 * UP systems, but it matters less now, and sometimes UP is emulated
 * over SMP.
 *
 * The assembly is volatilized to prevent the compiler from removing
 * the code chunks.  GCC aggressively reorders operations, so a memory
 * clobber is necessary to keep it from reordering across the memory
 * barriers.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
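
/*
 * Illustrative example (hypothetical, not part of this header): the
 * typical fcmpset retry loop.  On failure fcmpset refreshes *expect
 * from memory, so the old value does not need to be reloaded by hand:
 *
 *	u_int old;
 *
 *	old = atomic_load_acq_int(&counter);
 *	do {
 *		compute the new value from the latest "old"
 *	} while (atomic_fcmpset_int(&counter, &old, old + 1) == 0);
 */
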
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg	%3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg	%3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	lock; xaddl	%0,%1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}
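
/*
 * Illustrative example (hypothetical, not part of this header): a
 * ticket allocator built on atomic_fetchadd_int; each caller obtains
 * a distinct, monotonically increasing ticket:
 *
 *	static u_int next_ticket;
 *
 *	u_int my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 */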

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btsl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btrl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
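
/*
 * Illustrative example (hypothetical, not part of this header): a
 * bit-level try-lock.  Note that both functions reduce the bit index
 * modulo 32 ("v & 0x1f" above):
 *
 *	static u_int flags;
 *
 *	if (atomic_testandset_int(&flags, 0) == 0) {
 *		bit 0 was clear and is now set; we own the lock
 *		atomic_testandclear_int(&flags, 0);	release
 *	}
 */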

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */
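
/*
 * Illustrative example (hypothetical): the classic store/load
 * handshake that requires the Store/Load barrier.  With x and y
 * initially zero, CPU 0 runs:
 *
 *	x = 1;
 *	atomic_thread_fence_seq_cst();
 *	r0 = y;
 *
 * and CPU 1 runs the mirror image with x and y swapped.  Without the
 * fences, each load may pass the preceding store and both CPUs may
 * read 0; with them, at least one CPU reads 1.
 */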

#if defined(_KERNEL)
#define	__storeload_barrier()	__mbk()
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
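
/*
 * Illustrative example (hypothetical; atomic_load_int and
 * atomic_store_int come from sys/atomic_common.h): pairing the
 * fences to publish data.
 *
 * Producer:
 *	data = 42;
 *	atomic_thread_fence_rel();
 *	atomic_store_int(&ready, 1);
 *
 * Consumer:
 *	while (atomic_load_int(&ready) == 0)
 *		;
 *	atomic_thread_fence_acq();
 *	data is now guaranteed to read as 42
 */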

#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/* I486 does not support SMP or CMPXCHG8B. */
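/*
 * The _i386 variants below therefore provide atomicity by disabling
 * interrupts around plain 32-bit moves, which is sufficient only
 * because CPUs lacking CMPXCHG8B are strictly uniprocessor.
 */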
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i386(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if (atomic_cmpset_64_i386(dst, *expect, src)) {
		return (1);
	} else {
		*expect = *dst;
		return (0);
	}
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i586(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (*expect)		/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	lock; cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline int
atomic_fcmpset_64(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_fcmpset_64_i386(dst, expect, src));
	else
		return (atomic_fcmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t - v))
			break;
	}
}

#endif /* _KERNEL */

#endif /* !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline int
atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
{

	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
#define	atomic_testandset_acq_long	atomic_testandset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

#ifdef _KERNEL
/* Operations on 64-bit quad words. */
#define	atomic_cmpset_acq_64 atomic_cmpset_64
#define	atomic_cmpset_rel_64 atomic_cmpset_64
#define	atomic_fcmpset_acq_64 atomic_fcmpset_64
#define	atomic_fcmpset_rel_64 atomic_fcmpset_64
#define	atomic_fetchadd_acq_64	atomic_fetchadd_64
#define	atomic_fetchadd_rel_64	atomic_fetchadd_64
#define	atomic_add_acq_64 atomic_add_64
#define	atomic_add_rel_64 atomic_add_64
#define	atomic_subtract_acq_64 atomic_subtract_64
#define	atomic_subtract_rel_64 atomic_subtract_64
#define	atomic_load_64 atomic_load_acq_64
#define	atomic_store_64 atomic_store_rel_64
#endif

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
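
/*
 * Illustrative example (hypothetical, not part of this header):
 * lock-free push onto a singly-linked list using the pointer
 * operations; the casts follow the convention that the _ptr variants
 * operate on uintptr_t values:
 *
 *	struct node { struct node *next; };
 *	static struct node *head;
 *
 *	do {
 *		n->next = head;
 *	} while (atomic_cmpset_ptr((volatile uintptr_t *)&head,
 *	    (uintptr_t)n->next, (uintptr_t)n) == 0);
 */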

#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */