xref: /freebsd/sys/i386/include/atomic.h (revision db7f0b974f2ab273540a458ab50929ccbb1aa581)
/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#if defined(I686_CPU)
#define	mb()	__asm __volatile("mfence;" : : : "memory")
#define	wmb()	__asm __volatile("sfence;" : : : "memory")
#define	rmb()	__asm __volatile("lfence;" : : : "memory")
#else
/*
 * Pre-I686 CPUs lack the fence instructions, and it is an open question
 * whether they need a serializing instruction here instead; for now the
 * barriers are no-ops.
 */
#define	mb()
#define	wmb()
#define	rmb()
#endif
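
/*
 * Illustrative sketch, not part of this header: a producer can use wmb()
 * to make its data stores visible before it publishes a flag, and a
 * consumer can use rmb() before it reads the data.  The variables "data"
 * and "ready" and the helpers compute() and use() are hypothetical.
 *
 *	data = compute();
 *	wmb();
 *	ready = 1;
 *
 *	while (ready == 0)
 *		;
 *	rmb();
 *	use(data);
 */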

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_readandclear_int(P)	(tmp = *(u_int *)(P); *(u_int *)(P) = 0; return (tmp);)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_readandclear_long(P)	(tmp = *(u_long *)(P); *(u_long *)(P) = 0; return (tmp);)
 */

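/*
 * Usage sketch (illustrative only; "flags" and "count" are hypothetical
 * variables and EV_READY is a hypothetical flag bit):
 *
 *	static volatile u_int flags, count;
 *
 *	atomic_set_int(&flags, EV_READY);	set a bit atomically
 *	atomic_clear_int(&flags, EV_READY);	clear it again
 *	atomic_add_int(&count, 1);		atomic increment
 */
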
/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is marked volatile to demarcate potential before-and-after
 * side effects if an interrupt or SMP collision were to occur.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p));				\
}							\
struct __hack

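/*
 * For reference, a rough sketch of what ATOMIC_ASM(add, int, "addl %1,%0",
 * "ir", v) expands to in an SMP kernel (where MPLOCKED is "lock ; "):
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; addl %1,%0"
 *		: "=m" (*p)
 *		: "ir" (v), "m" (*p));
 *	}
 */
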
/*
 * Atomic compare and set, used by the mutex functions.
 *
 * If (*dst == exp), then *dst = src (all 32-bit words).
 *
 * Returns 0 on failure, non-zero on success.
 */

#ifdef CPU_DISABLE_CMPXCHG

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	u_char res;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%3,%4 ;		"
	"	jne	1f ;		"
	"	movl	%2,%1 ;		"
	"1:				"
	"	sete	%0 ;		"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "r" (exp),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}

#else /* !CPU_DISABLE_CMPXCHG */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %2,%1 ;	"
	"	sete	%0 ;		"
	"1:				"
	"# atomic_cmpset_int"
	: "=a" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (exp),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}

#endif /* CPU_DISABLE_CMPXCHG */

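/*
 * A common pattern built on atomic_cmpset_int() is the read-modify-CAS
 * retry loop.  This sketch atomically doubles the value that "p" points
 * to ("p" and "old" are hypothetical locals, not part of this header):
 *
 *	u_int old;
 *
 *	do {
 *		old = *p;
 *	} while (atomic_cmpset_int(p, old, old * 2) == 0);
 */
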
/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0, %1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 (result) */
	  "=m" (*p)			/* 1 */
	: "m" (*p));			/* 2 */

	return (v);
}

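/*
 * Example (illustrative): because atomic_fetchadd_int() returns the value
 * that *p held before the addition, it can hand out unique, increasing
 * ticket numbers; "next_ticket" is a hypothetical counter:
 *
 *	static volatile u_int next_ticket;
 *
 *	u_int my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 */
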
#if defined(_KERNEL) && !defined(SMP)

/*
 * We assume that a = b will do atomic loads and stores.  However, on a
 * PentiumPro or higher, reads may pass writes, so for that case we have
 * to use a serializing instruction (i.e. with LOCK) to do the load in
 * SMP kernels.  For UP kernels, however, the cache of the single processor
 * is always consistent, so we don't need any memory barriers.
 */
#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	return (*p);					\
}							\
							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	*p = v;						\
}							\
struct __hack

#else /* !(_KERNEL && !SMP) */

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(MPLOCKED LOP			\
	: "=a" (res),			/* 0 */		\
	  "=m" (*p)			/* 1 */		\
	: "m" (*p)			/* 2 */		\
	: "memory");					\
							\
	return (res);					\
}							\
							\
/*							\
 * The XCHG instruction asserts LOCK implicitly.	\
 */							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(SOP				\
	: "=m" (*p),			/* 0 */		\
	  "+r" (v)			/* 1 */		\
	: "m" (*p));			/* 2 */		\
}							\
struct __hack

#endif /* _KERNEL && !SMP */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");

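/*
 * Sketch of acquire/release pairing (illustrative; "msg", "ready" and
 * use() are hypothetical).  The release store makes the data store
 * visible before the flag store; the acquire load keeps the data load
 * from being reordered ahead of the flag load:
 *
 *	msg = 42;
 *	atomic_store_rel_int(&ready, 1);
 *
 *	while (atomic_load_acq_int(&ready) == 0)
 *		;
 *	use(msg);
 */
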
#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

/* Read the current value and store a zero in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_readandclear_int(volatile u_int *addr);
u_long	atomic_readandclear_long(volatile u_long *addr);

#endif /* __GNUCLIKE_ASM */

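/*
 * Example (illustrative): atomic_readandclear_int() swaps in zero, so a
 * set of pending event bits can be drained exactly once; "pending_flags",
 * EV_TIMER and handle_timer() are hypothetical:
 *
 *	u_int pending;
 *
 *	pending = atomic_readandclear_int(&pending_flags);
 *	if (pending & EV_TIMER)
 *		handle_timer();
 */
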
/* Acquire and release variants are identical to the normal ones. */
#define	atomic_set_acq_char		atomic_set_char
#define	atomic_set_rel_char		atomic_set_char
#define	atomic_clear_acq_char		atomic_clear_char
#define	atomic_clear_rel_char		atomic_clear_char
#define	atomic_add_acq_char		atomic_add_char
#define	atomic_add_rel_char		atomic_add_char
#define	atomic_subtract_acq_char	atomic_subtract_char
#define	atomic_subtract_rel_char	atomic_subtract_char

#define	atomic_set_acq_short		atomic_set_short
#define	atomic_set_rel_short		atomic_set_short
#define	atomic_clear_acq_short		atomic_clear_short
#define	atomic_clear_rel_short		atomic_clear_short
#define	atomic_add_acq_short		atomic_add_short
#define	atomic_add_rel_short		atomic_add_short
#define	atomic_subtract_acq_short	atomic_subtract_short
#define	atomic_subtract_rel_short	atomic_subtract_short

#define	atomic_set_acq_int		atomic_set_int
#define	atomic_set_rel_int		atomic_set_int
#define	atomic_clear_acq_int		atomic_clear_int
#define	atomic_clear_rel_int		atomic_clear_int
#define	atomic_add_acq_int		atomic_add_int
#define	atomic_add_rel_int		atomic_add_int
#define	atomic_subtract_acq_int		atomic_subtract_int
#define	atomic_subtract_rel_int		atomic_subtract_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_long
#define	atomic_set_rel_long		atomic_set_long
#define	atomic_clear_acq_long		atomic_clear_long
#define	atomic_clear_rel_long		atomic_clear_long
#define	atomic_add_acq_long		atomic_add_long
#define	atomic_add_rel_long		atomic_add_long
#define	atomic_subtract_acq_long	atomic_subtract_long
#define	atomic_subtract_rel_long	atomic_subtract_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
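
/*
 * These pointer variants cast to u_int, which is safe on i386 because
 * pointers are 32 bits wide.  Sketch (illustrative; "struct node", "top"
 * and "elem" are hypothetical) of a lock-free stack push built on
 * atomic_cmpset_ptr():
 *
 *	struct node { struct node *next; };
 *	struct node *top, *elem;
 *
 *	do {
 *		elem->next = top;
 *	} while (atomic_cmpset_ptr(&top, elem->next, elem) == 0);
 */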

#endif /* !WANT_FUNCTIONS */

#endif /* !_MACHINE_ATOMIC_H_ */