xref: /freebsd/sys/i386/include/atomic.h (revision 8306a37bbba8c566f7b161d5b2cd80cc4462f412)
/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#ifndef __GNUC__
#ifndef lint
#error "This file must be compiled with GCC or lint"
#endif /* lint */
#endif /* __GNUC__ */

/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char*)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char*)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char*)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char*)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short*)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short*)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short*)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short*)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int*)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int*)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int*)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int*)(P) -= (V))
 * atomic_readandclear_int(P)	(tmp = *(u_int*)(P); *(u_int*)(P) = 0; return (tmp);)
 *
 * atomic_set_long(P, V)	(*(u_long*)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long*)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long*)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long*)(P) -= (V))
 * atomic_readandclear_long(P)	(tmp = *(u_long*)(P); *(u_long*)(P) = 0; return (tmp);)
 */
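
/*
 * Illustrative usage sketch (the variable name "refcnt" is hypothetical,
 * not part of this header): a reference count manipulated with the plain
 * arithmetic operations above.
 *
 *	static volatile u_int refcnt;
 *
 *	atomic_add_int(&refcnt, 1);		take a reference
 *	atomic_subtract_int(&refcnt, 1);	drop a reference
 */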

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
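
/*
 * For example, ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) expands here
 * to nothing more than the prototype of the function built into the
 * kernel:
 *
 *	void atomic_add_int(volatile u_int *p, u_int v);
 */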

#else /* !KLD_MODULE */

/*
 * For userland, assume the SMP case and use lock prefixes so that
 * the binaries will run on both types of systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define MPLOCKED	lock ;
#else
#define MPLOCKED
#endif
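
/*
 * With MPLOCKED defined as above, an SMP kernel (or userland) build
 * prefixes each read-modify-write instruction with "lock"; e.g. the OP
 * string "addl %1,%0" is emitted as "lock ; addl %1,%0".  A UP kernel
 * build emits the bare instruction.
 */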

/*
 * The assembly is marked volatile to keep the compiler from reordering
 * it with respect to surrounding side effects, which could otherwise be
 * exposed if an interrupt or SMP collision were to occur.
 */
#ifdef __GNUC__
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(__XSTRING(MPLOCKED) OP		\
			 : "+m" (*p)			\
			 : CONS (V));			\
}
#else /* !__GNUC__ */
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#endif /* __GNUC__ */
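
/*
 * As a concrete example, ATOMIC_ASM(add, int, "addl %1,%0", "ir", v)
 * expands (in an SMP kernel) to approximately:
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; addl %1,%0"
 *				 : "+m" (*p)
 *				 : "ir" (v));
 *	}
 */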

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * if (*dst == exp) *dst = src (all 32-bit words)
 *
 * Returns 0 on failure, non-zero on success.
 */
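
/*
 * For instance, a minimal spin-lock acquire/release can be sketched on
 * top of atomic_cmpset_int ("lock_word" is a hypothetical variable,
 * 0 == unowned):
 *
 *	while (atomic_cmpset_int(&lock_word, 0, 1) == 0)
 *		;				spin until 0 -> 1 succeeds
 *	... critical section ...
 *	atomic_store_rel_int(&lock_word, 0);	release the lock
 */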

#if defined(__GNUC__)
#if defined(I386_CPU)
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	int res = exp;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%0,%2 ;		"
	"	jne	1f ;		"
	"	movl	%1,%2 ;		"
	"1:				"
	"	sete	%%al ;		"
	"	movzbl	%%al,%0 ;	"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "+a" (res)			/* 0 (result) */
	: "r" (src),			/* 1 */
	  "m" (*(dst))			/* 2 */
	: "memory");

	return (res);
}
#else /* defined(I386_CPU) */
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	int res = exp;

	__asm __volatile (
	"	" __XSTRING(MPLOCKED) "	"
	"	cmpxchgl %1,%2 ;	"
	"	setz	%%al ;		"
	"	movzbl	%%al,%0 ;	"
	"1:				"
	"# atomic_cmpset_int"
	: "+a" (res)			/* 0 (result) */
	: "r" (src),			/* 1 */
	  "m" (*(dst))			/* 2 */
	: "memory");

	return (res);
}
#endif /* defined(I386_CPU) */
#else /* !defined(__GNUC__) */
static __inline int
atomic_cmpset_int(volatile u_int *dst __unused, u_int exp __unused,
    u_int src __unused)
{
	return (0);		/* dummy for lint; never compiled by GCC */
}
#endif /* defined(__GNUC__) */

#if defined(__GNUC__)
#if defined(I386_CPU)
/*
 * We assume that a = b will do atomic loads and stores.
 *
 * XXX: This is _NOT_ safe on a P6 or higher because it does not guarantee
 * memory ordering.  These should only be used on a 386.
 */
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	return (*p);					\
}							\
							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	*p = v;						\
	__asm __volatile("" : : : "memory");		\
}
#else /* !defined(I386_CPU) */

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(__XSTRING(MPLOCKED) LOP	\
	: "=a" (res),			/* 0 (result) */\
	  "+m" (*p)			/* 1 */		\
	: : "memory");					\
							\
	return (res);					\
}							\
							\
/*							\
 * The XCHG instruction asserts LOCK automagically.	\
 */							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(SOP				\
	: "+m" (*p),			/* 0 */		\
	  "+r" (v)			/* 1 */		\
	: : "memory");					\
}
#endif	/* defined(I386_CPU) */
#else /* !defined(__GNUC__) */

/*
 * XXX: Dummy declarations for lint.
 */
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p __unused);	\
void atomic_store_rel_##TYPE(volatile u_##TYPE *p __unused,	\
    u_##TYPE v __unused)

#endif /* defined(__GNUC__) */
#endif /* KLD_MODULE */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");
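
/*
 * The _acq ("acquire") loads and _rel ("release") stores generated above
 * order memory accesses around a handoff.  A sketch of a hypothetical
 * producer/consumer pair ("ready" and "data" are illustrative names):
 *
 *	producer:	data = compute();
 *			atomic_store_rel_int(&ready, 1);
 *
 *	consumer:	while (atomic_load_acq_int(&ready) == 0)
 *				;
 *			consume(data);		data is now visible
 */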

#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

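/*
 * On i386 a locked read-modify-write instruction is a full memory
 * barrier, so the acquire and release variants below can simply alias
 * the plain operations.
 */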
#define	atomic_set_acq_char		atomic_set_char
#define	atomic_set_rel_char		atomic_set_char
#define	atomic_clear_acq_char		atomic_clear_char
#define	atomic_clear_rel_char		atomic_clear_char
#define	atomic_add_acq_char		atomic_add_char
#define	atomic_add_rel_char		atomic_add_char
#define	atomic_subtract_acq_char	atomic_subtract_char
#define	atomic_subtract_rel_char	atomic_subtract_char

#define	atomic_set_acq_short		atomic_set_short
#define	atomic_set_rel_short		atomic_set_short
#define	atomic_clear_acq_short		atomic_clear_short
#define	atomic_clear_rel_short		atomic_clear_short
#define	atomic_add_acq_short		atomic_add_short
#define	atomic_add_rel_short		atomic_add_short
#define	atomic_subtract_acq_short	atomic_subtract_short
#define	atomic_subtract_rel_short	atomic_subtract_short

#define	atomic_set_acq_int		atomic_set_int
#define	atomic_set_rel_int		atomic_set_int
#define	atomic_clear_acq_int		atomic_clear_int
#define	atomic_clear_rel_int		atomic_clear_int
#define	atomic_add_acq_int		atomic_add_int
#define	atomic_add_rel_int		atomic_add_int
#define	atomic_subtract_acq_int		atomic_subtract_int
#define	atomic_subtract_rel_int		atomic_subtract_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_long
#define	atomic_set_rel_long		atomic_set_long
#define	atomic_clear_acq_long		atomic_clear_long
#define	atomic_clear_rel_long		atomic_clear_long
#define	atomic_add_acq_long		atomic_add_long
#define	atomic_add_rel_long		atomic_add_long
#define	atomic_subtract_acq_long	atomic_subtract_long
#define	atomic_subtract_rel_long	atomic_subtract_long
#define	atomic_cmpset_long		atomic_cmpset_int
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_int
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_int

#define	atomic_cmpset_acq_ptr		atomic_cmpset_ptr
#define	atomic_cmpset_rel_ptr		atomic_cmpset_ptr

#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int

#if !defined(WANT_FUNCTIONS)
static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
	    (u_int)src));
}

static __inline void *
atomic_load_acq_ptr(volatile void *p)
{
	return ((void *)atomic_load_acq_int((volatile u_int *)p));
}

static __inline void
atomic_store_rel_ptr(volatile void *p, void *v)
{
	atomic_store_rel_int((volatile u_int *)p, (u_int)v);
}

#define ATOMIC_PTR(NAME)				\
static __inline void					\
atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_int((volatile u_int *)p, v);	\
}							\
							\
static __inline void					\
atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_acq_int((volatile u_int *)p, v);\
}							\
							\
static __inline void					\
atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_rel_int((volatile u_int *)p, v);\
}

ATOMIC_PTR(set)
ATOMIC_PTR(clear)
ATOMIC_PTR(add)
ATOMIC_PTR(subtract)

#undef ATOMIC_PTR
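
/*
 * For example, ATOMIC_PTR(set) above generates atomic_set_ptr(),
 * atomic_set_acq_ptr() and atomic_set_rel_ptr(), each a thin wrapper
 * that casts to u_int and forwards:
 *
 *	static __inline void
 *	atomic_set_ptr(volatile void *p, uintptr_t v)
 *	{
 *		atomic_set_int((volatile u_int *)p, v);
 *	}
 */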

#if defined(__GNUC__)
static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile (
	"	xorl	%0,%0 ;		"
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}
#else /* !defined(__GNUC__) */
/*
 * XXX: Dummy for lint.
 */
static __inline u_int
atomic_readandclear_int(volatile u_int *addr __unused)
{
	return (0);
}
#endif /* defined(__GNUC__) */
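
/*
 * The xorl/xchgl sequence above zeroes a register and then atomically
 * swaps it with *addr, so the old value is returned and the memory word
 * is cleared in one indivisible step.  Hypothetical use, draining a
 * pending-event mask ("pending" is an illustrative name):
 *
 *	events = atomic_readandclear_int(&pending);
 */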

#if defined(__GNUC__)
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile (
	"	xorl	%0,%0 ;		"
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}
#else /* !defined(__GNUC__) */
/*
 * XXX: Dummy for lint.
 */
static __inline u_long
atomic_readandclear_long(volatile u_long *addr __unused)
{
	return (0);
}
#endif /* defined(__GNUC__) */
#endif	/* !defined(WANT_FUNCTIONS) */
#endif /* ! _MACHINE_ATOMIC_H_ */