/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x80

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
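
/*
 * Editor's illustrative example, not part of the original header: a
 * hypothetical driver keeping a flags word up to date with the simple
 * atomic operations above ("sc" and "SC_BUSY" are invented names):
 *
 *	atomic_set_int(&sc->sc_flags, SC_BUSY);
 *	... do work ...
 *	atomic_clear_int(&sc->sc_flags, SC_BUSY);
 *
 * Each line is a single locked read-modify-write, so no mutex is
 * needed for the flag updates themselves.
 */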
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
int	atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int	atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
int	atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
	    u_short src);
int	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_fetchadd_64(volatile uint64_t *, uint64_t);

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized so the compiler cannot remove it as dead
 * code.  GCC also aggressively reorders operations; the barrier
 * variants therefore clobber memory to prevent such reordering across
 * them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack
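
/*
 * Editor's note, for illustration only: with MPLOCKED defined as
 * "lock ; ", instantiating ATOMIC_ASM(add, int, "addl %1,%0", "ir", v)
 * (see the bottom of this file) produces approximately:
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; addl %1,%0"
 *		: "+m" (*p) : "ir" (v) : "cc");
 *	}
 *
 * plus an atomic_add_barr_int variant whose only difference is the
 * extra "memory" clobber described above.
 */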
/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;	"			\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;	"			\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");
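
/*
 * Editor's illustrative sketch, not part of the original header: the
 * canonical retry loop built on fcmpset.  On failure, fcmpset stores
 * the current value of *p back into "old", so the loop never has to
 * re-read *p itself.  The function name is invented.
 *
 *	static __inline u_int
 *	example_increment_if_nonzero(volatile u_int *p)
 *	{
 *		u_int old;
 *
 *		old = *p;
 *		for (;;) {
 *			if (old == 0)
 *				return (0);
 *			if (atomic_fcmpset_int(p, &old, old + 1))
 *				return (old + 1);
 *		}
 *	}
 */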
/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */
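
/*
 * Editor's illustrative note: the Store/Load case above is what makes
 * Dekker-style handshakes work.  With both flags initially zero, and
 * atomic_thread_fence_seq_cst() as defined below:
 *
 *	CPU 0:					CPU 1:
 *	atomic_store_rel_int(&a, 1);		atomic_store_rel_int(&b, 1);
 *	atomic_thread_fence_seq_cst();		atomic_thread_fence_seq_cst();
 *	r0 = atomic_load_acq_int(&b);		r1 = atomic_load_acq_int(&a);
 *
 * Without the fences, the IA32 model lets each load pass the
 * preceding store, so r0 == 0 && r1 == 0 is a legal outcome; the
 * "lock addl" barrier excludes it.
 */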

#if defined(_KERNEL)
#if defined(SMP)
#define	__storeload_barrier()	__mbk()
#else /* _KERNEL && UP */
#define	__storeload_barrier()	__compiler_membar()
#endif /* SMP */
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
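
/*
 * Editor's illustrative sketch of the intended acquire/release
 * pairing (all names invented): a producer publishes data and then
 * sets a flag with release semantics; the consumer reads the flag
 * with acquire semantics before touching the data.
 *
 *	producer:
 *		sc->sc_data = compute();
 *		atomic_store_rel_int(&sc->sc_ready, 1);
 *
 *	consumer:
 *		while (atomic_load_acq_int(&sc->sc_ready) == 0)
 *			continue;
 *		consume(sc->sc_data);
 *
 * The release store keeps the data write from sinking below the flag
 * write; the acquire load keeps the data read from hoisting above the
 * flag read.
 */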

#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/* I486 does not support SMP or CMPXCHG8B. */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}

#endif /* _KERNEL */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */
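
/*
 * Editor's note: the 64-bit operations above pick an implementation
 * at run time from cpu_feature, so a caller needs no CPU checks of
 * its own.  For example (the counter name is invented):
 *
 *	uint64_t old;
 *
 *	old = atomic_fetchadd_64(&sc->sc_bytes, len);
 *
 * On a 486 this takes the interrupt-disabling fallback; on CPUs with
 * CMPXCHG8B (CPUID_CX8) it takes the lock-prefixed cmpxchg8b path.
 */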

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq", v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq", v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq", v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir", v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir", v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir", v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir", v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;	"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */
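
/*
 * Editor's illustrative sketch, names invented: atomic_swap_int is
 * the natural way to drain a word of pending-event bits in one shot:
 *
 *	u_int pending;
 *
 *	pending = atomic_swap_int(&sc->sc_events, 0);
 *	while (pending != 0)
 *		pending = handle_one_event(pending);
 *
 * atomic_readandclear_int(), defined below, is exactly this swap with
 * zero.
 */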

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
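
/*
 * Editor's illustrative sketch, not part of the original header: a
 * lock-free free-list push built on the pointer operations (types and
 * names invented; a matching pop would have to cope with ABA, which
 * is out of scope here):
 *
 *	struct entry { struct entry *next; };
 *
 *	static __inline void
 *	example_push(struct entry **head, struct entry *e)
 *	{
 *		struct entry *old;
 *
 *		old = (struct entry *)atomic_load_acq_ptr(head);
 *		do {
 *			e->next = old;
 *		} while (atomic_fcmpset_rel_ptr(head, &old, e) == 0);
 *	}
 */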

#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */