xref: /freebsd/sys/i386/include/atomic.h (revision 5188b5f3c2d7e1c76e995fddde5a9678c3271510)
/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#define	mb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc")
#define	wmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc")
#define	rmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc")
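
/*
 * All three barriers are implemented as a LOCKed no-op add to the top of
 * the stack: a locked read-modify-write is a full fence on x86 and, unlike
 * MFENCE, is available on every i386-class CPU.  A minimal publish/consume
 * sketch ("data" and "flag" are hypothetical, suitably volatile variables):
 *
 *	producer:  data = 42;  wmb();  flag = 1;
 *	consumer:  while (flag == 0) ;  rmb();  use(data);
 */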

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
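
/*
 * Usage sketch (illustrative only; "pkt_count" is a hypothetical counter):
 *
 *	static volatile u_int pkt_count;
 *	...
 *	atomic_add_int(&pkt_count, 1);
 *	n = atomic_readandclear_int(&pkt_count);	(harvest and reset)
 */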

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);

#define	ATOMIC_LOAD(TYPE, LOP)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized so that the compiler cannot delete or hoist
 * it.  GCC also aggressively reorders operations, so the barrier variants
 * add a "memory" clobber to keep memory accesses from being moved across
 * the barrier.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack
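
/*
 * For reference, a sketch of what ATOMIC_ASM(add, int, "addl %1,%0", "ir", v)
 * expands to in an SMP kernel (the _barr_ variant differs only in its
 * "memory" clobber):
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; addl %1,%0"
 *		: "+m" (*p)
 *		: "ir" (v)
 *		: "cc");
 *	}
 */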

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == expect) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */

#ifdef CPU_DISABLE_CMPXCHG

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%3,%1 ;		"
	"	jne	1f ;		"
	"	movl	%2,%1 ;		"
	"1:				"
	"	sete	%0 ;		"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "r" (expect)			/* 3 */
	: "memory");
	return (res);
}

#else /* !CPU_DISABLE_CMPXCHG */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %3,%1 ;	"
	"	sete	%0 ;		"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+a" (expect)			/* 2 */
	: "r" (src)			/* 3 */
	: "memory", "cc");
	return (res);
}

#endif /* CPU_DISABLE_CMPXCHG */
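
/*
 * A typical retry loop built on atomic_cmpset_int (illustrative sketch;
 * "flags" and FLAG_BUSY are hypothetical):
 *
 *	u_int old;
 *
 *	do {
 *		old = flags;
 *	} while (atomic_cmpset_int(&flags, old, old | FLAG_BUSY) == 0);
 */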

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}
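
/*
 * Because atomic_fetchadd_int() returns the value *before* the addition,
 * it can detect "last reference dropped" in one step.  Hedged sketch
 * ("refcnt" and obj_destroy() are hypothetical):
 *
 *	if (atomic_fetchadd_int(&obj->refcnt, -1) == 1)
 *		obj_destroy(obj);
 */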

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
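
/*
 * atomic_testandset_int() sets bit (v % 32) and returns its previous
 * value, so it can serve as a one-bit try-lock.  Illustrative sketch
 * ("flags" and LOCK_BIT are hypothetical):
 *
 *	while (atomic_testandset_int(&flags, LOCK_BIT) != 0)
 *		cpu_spinwait();		(spin until the bit was clear)
 */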

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, loads may pass stores, so in SMP kernels atomic_load_acq
 * must combine the load with a Store/Load barrier.  We use
 * "lock cmpxchg" as recommended by the AMD Software Optimization
 * Guide, and not mfence.  For UP kernels, however, the cache of the
 * single processor is always consistent, so we only need to take care
 * of the compiler.
 */
#define	ATOMIC_STORE(TYPE)				\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__compiler_membar();				\
	*p = v;						\
}							\
struct __hack
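
/*
 * Release-store usage sketch (illustrative; "ring" and "prod_idx" are
 * hypothetical): every plain store issued before the release store is
 * visible to another CPU before the new index is:
 *
 *	ring[i] = pkt;					(plain store)
 *	atomic_store_rel_int(&prod_idx, i + 1);		(publish)
 */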

#if defined(_KERNEL) && !defined(SMP)

#define	ATOMIC_LOAD(TYPE, LOP)				\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE tmp;					\
							\
	tmp = *p;					\
	__compiler_membar();				\
	return (tmp);					\
}							\
struct __hack

#else /* !(_KERNEL && !SMP) */

#define	ATOMIC_LOAD(TYPE, LOP)				\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(MPLOCKED LOP			\
	: "=a" (res),			/* 0 */		\
	  "+m" (*p)			/* 1 */		\
	: : "memory", "cc");				\
	return (res);					\
}							\
struct __hack

#endif /* _KERNEL && !SMP */

#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/*
 * The i486 supports neither SMP nor CMPXCHG8B, so the _i386 variants
 * below get 64-bit atomicity by briefly disabling interrupts, which is
 * sufficient on a uniprocessor.
 */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

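/*
 * Note on the load below: CMPXCHG8B compares %edx:%eax with the memory
 * operand and, on a match, stores %ecx:%ebx there; on a mismatch it loads
 * the memory operand into %edx:%eax.  Priming %eax/%edx from %ebx/%ecx
 * means a match rewrites the same value while a mismatch performs a load,
 * so either way %edx:%eax ends up holding the current value and the
 * instruction acts as a pure atomic 64-bit load.
 */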
static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}
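
/*
 * The wrappers above select the CMPXCHG8B path at run time when CPUID
 * reports the CX8 feature.  A 64-bit increment sketch (illustrative;
 * "cnt64" is hypothetical):
 *
 *	uint64_t old;
 *
 *	do {
 *		old = atomic_load_acq_64(&cnt64);
 *	} while (atomic_cmpset_64(&cnt64, old, old + 1) == 0);
 */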

#endif /* _KERNEL */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

ATOMIC_LOAD(char,  "cmpxchgb %b0,%1");
ATOMIC_LOAD(short, "cmpxchgw %w0,%1");
ATOMIC_LOAD(int,   "cmpxchgl %0,%1");
ATOMIC_LOAD(long,  "cmpxchgl %0,%1");

ATOMIC_STORE(char);
ATOMIC_STORE(short);
ATOMIC_STORE(int);
ATOMIC_STORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}
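
/*
 * No MPLOCKED prefix is needed above: an XCHG that references memory
 * asserts the LOCK signal implicitly on x86.
 */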

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
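
/*
 * Pointer CAS sketch (illustrative; "struct node", "head", and "new" are
 * hypothetical): a lock-free LIFO push.  On i386 pointers are 32 bits
 * wide, so the pointer operations map directly onto the int operations:
 *
 *	struct node *old;
 *
 *	do {
 *		old = head;
 *		new->next = old;
 *	} while (atomic_cmpset_ptr(&head, old, new) == 0);
 */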

#endif /* !WANT_FUNCTIONS */

#endif /* !_MACHINE_ATOMIC_H_ */