/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

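/*
 * Memory barriers.  All three are implemented as a locked add of zero
 * to the top of the stack: on x86, any LOCKed read-modify-write
 * instruction is a full barrier for both loads and stores, and unlike
 * the SSE fence instructions it is available on every i386-class CPU.
 */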
#define	mb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
#define	wmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
#define	rmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory")

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
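
/*
 * For example (illustrative only; the variable and flag names below are
 * made up), a counter and a flag word shared with interrupt context can
 * be updated safely without any extra locking:
 *
 *	atomic_add_int(&pkts_received, 1);
 *	atomic_set_int(&dev_flags, FLAG_TX_ACTIVE);
 */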

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized so that the compiler cannot discard it.
 * GCC aggressively reorders operations, so the memory-barrier variants
 * must also clobber memory to keep loads and stores from being moved
 * across them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p));				\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p)				\
	: "memory");					\
}							\
struct __hack
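
/*
 * As a sketch of what this generates: ATOMIC_ASM(add, int, "addl %1,%0",
 * "ir", v) expands (with the lock prefix present in SMP kernels and in
 * userland) to
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; addl %1,%0"
 *		: "=m" (*p)
 *		: "ir" (v), "m" (*p));
 *	}
 *
 * plus an atomic_add_barr_int() variant that also clobbers "memory".
 */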

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * if (*dst == expect) *dst = src (in one atomic 32-bit operation)
 *
 * Returns 0 on failure, non-zero on success.
 */
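
/*
 * A minimal usage sketch (the lock word below is hypothetical, not the
 * real mutex implementation): spin until the word moves from 0
 * (unlocked) to 1 (locked).
 *
 *	while (atomic_cmpset_acq_int(&lock_word, 0, 1) == 0)
 *		continue;
 */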

#ifdef CPU_DISABLE_CMPXCHG

/*
 * When CMPXCHG cannot be used, emulate compare-and-set by disabling
 * interrupts around a plain compare and store.
 */
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%3,%4 ;		"
	"	jne	1f ;		"
	"	movl	%2,%1 ;		"
	"1:				"
	"       sete	%0 ;		"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "r" (expect),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}

#else /* !CPU_DISABLE_CMPXCHG */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %2,%1 ;	"
	"	sete	%0 ;		"
	"# atomic_cmpset_int"
	: "=a" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (expect),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}

#endif /* CPU_DISABLE_CMPXCHG */

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0, %1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 (result) */
	  "=m" (*p)			/* 1 */
	: "m" (*p));			/* 2 */

	return (v);
}

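/*
 * A sketch of typical use (the counter name is invented): hand out
 * strictly increasing ticket numbers to concurrent callers.
 *
 *	u_int ticket;
 *
 *	ticket = atomic_fetchadd_int(&next_ticket, 1);
 *
 * Each caller gets the value next_ticket held before its own increment,
 * so no two callers receive the same ticket.
 */
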
#if defined(_KERNEL) && !defined(SMP)

/*
 * We assume that a = b will do atomic loads and stores.  However, on a
 * PentiumPro or higher, reads may pass writes, so in that case we have
 * to use a serializing instruction (i.e. one with LOCK) to do the load
 * in SMP kernels.  For UP kernels, however, the cache of the single
 * processor is always consistent, so we only need to take care of the
 * compiler.
 */
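
/*
 * As a sketch, atomic_load_acq_int() in a UP kernel therefore reduces
 * to an ordinary load followed by a compiler barrier:
 *
 *	u_int tmp;
 *
 *	tmp = *p;
 *	__asm __volatile("" : : : "memory");
 *	return (tmp);
 *
 * and atomic_store_rel_int() is the mirror image: the barrier first,
 * then a plain store.
 */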
#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE tmp;					\
							\
	tmp = *p;					\
	__asm __volatile("" : : : "memory");		\
	return (tmp);					\
}							\
							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("" : : : "memory");		\
	*p = v;						\
}							\
struct __hack

#else /* !(_KERNEL && !SMP) */

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(MPLOCKED LOP			\
	: "=a" (res),			/* 0 */		\
	  "=m" (*p)			/* 1 */		\
	: "m" (*p)			/* 2 */		\
	: "memory");					\
							\
	return (res);					\
}							\
							\
/*							\
 * The XCHG instruction asserts LOCK automagically.	\
 */							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(SOP				\
	: "=m" (*p),			/* 0 */		\
	  "+r" (v)			/* 1 */		\
	: "m" (*p)			/* 2 */		\
	: "memory");					\
}							\
struct __hack

#endif /* _KERNEL && !SMP */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");

#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

/* Read the current value and store a zero in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_readandclear_int(volatile u_int *addr);
u_long	atomic_readandclear_long(volatile u_long *addr);

#endif /* __GNUCLIKE_ASM */

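/*
 * A sketch of typical readandclear use (the names are invented):
 * consume a word of pending event bits exactly once.
 *
 *	u_int pending;
 *
 *	pending = atomic_readandclear_int(&pending_events);
 *	if (pending & PENDING_TIMER)
 *		handle_timer();
 *
 * Because the exchange is atomic, every bit that was set is seen by
 * exactly one consumer.
 */
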
#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
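
/*
 * A sketch of the publish/consume pattern these support (the names are
 * invented): the writer initializes an object, then publishes the
 * pointer with release semantics, so a reader that picks the pointer up
 * with acquire semantics sees the fully-initialized object.
 *
 *	obj->ready = 1;
 *	atomic_store_rel_ptr(&global_obj, (u_int)obj);
 *	...
 *	obj = (struct obj *)atomic_load_acq_ptr(&global_obj);
 */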

#endif /* !WANT_FUNCTIONS */

#endif /* !_MACHINE_ATOMIC_H_ */