xref: /freebsd/sys/i386/include/atomic.h (revision dfdc9a05c6797d244cb733f108719fdcfa0e8379)
/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x180
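
/*
 * Illustrative sketch (not part of this header): the check in
 * i386/vm_machdep.c is, in essence, a compile-time assertion of the
 * form below; the exact spelling used there may differ.
 *
 *	CTASSERT(__OFFSETOF_MONITORBUF ==
 *	    __offsetof(struct pcpu, pc_monitorbuf));
 */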

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
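
/*
 * Usage sketch (illustrative only, not part of this header): the
 * plain variants are typically used for lock-free updates of shared
 * flag and counter words.  The names below are hypothetical.
 *
 *	volatile u_int sc_flags;
 *	#define	SC_BUSY	0x0001
 *
 *	atomic_set_int(&sc_flags, SC_BUSY);
 *	atomic_clear_int(&sc_flags, SC_BUSY);
 */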

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is marked volatile to keep the compiler from removing
 * it as dead code.  GCC aggressively reorders operations, so the
 * memory-barrier variants also clobber memory to prevent such
 * reordering across them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == expect) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */
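
/*
 * Usage sketch (illustrative only, not part of this header): cmpset
 * is normally wrapped in a read-modify-write retry loop.  The
 * variable names below are hypothetical.
 *
 *	volatile u_int counter;
 *	u_int old;
 *
 *	do {
 *		old = counter;
 *	} while (atomic_cmpset_int(&counter, old, old + 1) == 0);
 */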

#ifdef CPU_DISABLE_CMPXCHG

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%3,%1 ;		"
	"	jne	1f ;		"
	"	movl	%2,%1 ;		"
	"1:				"
	"	sete	%0 ;		"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "r" (expect)			/* 3 */
	: "memory");
	return (res);
}

#else /* !CPU_DISABLE_CMPXCHG */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %3,%1 ;	"
	"	sete	%0 ;		"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+a" (expect)			/* 2 */
	: "r" (src)			/* 3 */
	: "memory", "cc");
	return (res);
}

#endif /* CPU_DISABLE_CMPXCHG */

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}
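
/*
 * Usage sketch (illustrative only, not part of this header): because
 * the previous value is returned, fetchadd can hand out unique
 * tickets without a retry loop.  "next_seq" is hypothetical.
 *
 *	volatile u_int next_seq;
 *	u_int my_seq;
 *
 *	my_seq = atomic_fetchadd_int(&next_seq, 1);
 */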

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
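
/*
 * Usage sketch (illustrative only, not part of this header): the bit
 * index is taken modulo 32 (the "v & 0x1f" above) and the previous
 * value of the bit is returned, which allows a simple try-lock on a
 * hypothetical "lock_word":
 *
 *	volatile u_int lock_word;
 *
 *	if (atomic_testandset_int(&lock_word, 0) == 0) {
 *		... bit was clear, lock acquired ...
 *		atomic_testandclear_int(&lock_word, 0);
 *	}
 */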

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */

#if defined(_KERNEL)
#if defined(SMP)
#define	__storeload_barrier()	__mbk()
#else /* _KERNEL && UP */
#define	__storeload_barrier()	__compiler_membar()
#endif /* SMP */
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
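
/*
 * Usage sketch (illustrative only, not part of this header): only the
 * seq_cst fence emits an instruction; the other fences merely
 * constrain the compiler, which is sufficient for acquire and release
 * on IA32.  A true Store/Load ordering needs seq_cst, as in this
 * hypothetical Dekker-style handshake where the store must be visible
 * before the load executes:
 *
 *	my_flag = 1;
 *	atomic_thread_fence_seq_cst();
 *	if (other_flag == 0)
 *		... enter the critical section ...
 */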

#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/* I486 does not support SMP or CMPXCHG8B. */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}
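
/*
 * Usage sketch (illustrative only, not part of this header): the
 * 64-bit operations select the i386 or i586 implementation at run
 * time based on CPUID_CX8, so callers use them exactly like the
 * 32-bit variants.  "stat64" is hypothetical.
 *
 *	volatile uint64_t stat64;
 *	uint64_t old;
 *
 *	do {
 *		old = atomic_load_acq_64(&stat64);
 *	} while (atomic_cmpset_64(&stat64, old, old + 1) == 0);
 */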

#endif /* _KERNEL */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);
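
/*
 * Expansion sketch (illustrative only, not part of this header): for
 * example, ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) above expands,
 * in essence, to:
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile(MPLOCKED "addl %1,%0"
 *		: "+m" (*p) : "ir" (v) : "cc");
 *	}
 *
 * plus an atomic_add_barr_int variant that additionally clobbers
 * memory.
 */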

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
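
/*
 * Usage sketch (illustrative only, not part of this header):
 * readandclear is a swap with zero, which drains a word of pending
 * event bits in one atomic step.  "pending" is hypothetical.
 *
 *	volatile u_int pending;
 *	u_int events;
 *
 *	events = atomic_readandclear_int(&pending);
 *	... handle every bit set in events ...
 */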

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))

#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif
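
/*
 * Note (an observation on the definitions above, not new behavior):
 * mb(), rmb() and wmb() all map to the same full barrier here; this
 * implementation does not use distinct read-only or write-only
 * barrier instructions.
 */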

#endif /* !_MACHINE_ATOMIC_H_ */