/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#include <sys/atomic_common.h>

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x80

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
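
/*
 * Illustrative sketch (not part of this header): typical use of the
 * simple operations above, e.g. for a word of flag bits shared between
 * CPUs.  "flags" and "counter" are hypothetical variables.
 *
 *	volatile u_int flags, counter;
 *
 *	atomic_set_int(&flags, 0x01);		set bit 0
 *	atomic_clear_int(&flags, 0x01);		clear bit 0
 *	atomic_add_int(&counter, 1);		atomic increment
 */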

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
int	atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int	atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
int	atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
	    u_short src);
int	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_fcmpset_64(volatile uint64_t *, uint64_t *, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_fetchadd_64(volatile uint64_t *, uint64_t);
void		atomic_add_64(volatile uint64_t *, uint64_t);
void		atomic_subtract_64(volatile uint64_t *, uint64_t);

#else /* __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL) || defined(KLD_MODULE)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized so that the compiler cannot remove the
 * code chunks.  GCC aggressively reorders operations, so a memory
 * clobber is needed to prevent such reordering across the memory
 * barriers.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;		"		\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;		"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");
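
/*
 * Illustrative sketch (not part of this header): a typical fcmpset
 * retry loop.  On failure, fcmpset writes the value it found into
 * "old", so the loop does not need to re-read *p by hand.  "compute"
 * and the variable names are hypothetical.
 *
 *	u_int old, new;
 *
 *	old = *p;
 *	do {
 *		new = compute(old);
 *	} while (atomic_fcmpset_int(p, &old, new) == 0);
 */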

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
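
/*
 * Illustrative sketch (not part of this header): atomic_fetchadd_int()
 * returns the value *before* the addition, so it can hand out unique
 * tickets without a retry loop, and atomic_testandset_int() reports
 * whether the bit was already set.  The names are hypothetical.
 *
 *	my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 *
 *	if (atomic_testandset_int(&busy, 0) == 0)
 *		... this caller is the one that set bit 0 ...
 */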

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */

#if defined(_KERNEL)
#if defined(SMP) || defined(KLD_MODULE)
#define	__storeload_barrier()	__mbk()
#else /* _KERNEL && UP */
#define	__storeload_barrier()	__compiler_membar()
#endif /* SMP */
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
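
/*
 * Illustrative sketch (not part of this header): the usual
 * acquire/release pairing built from the primitives above.  A release
 * store publishes data; an acquire load that observes the store also
 * observes everything written before it.  "data" and "flag" are
 * hypothetical.
 *
 *	(producer)
 *	data = 42;
 *	atomic_store_rel_int(&flag, 1);
 *
 *	(consumer)
 *	while (atomic_load_acq_int(&flag) == 0)
 *		;
 *	v = data;	guaranteed to observe 42
 */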

#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/*
 * I486 does not support SMP or CMPXCHG8B, so the i386 variants instead
 * make the 64-bit access atomic by disabling interrupts around it.
 */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i386(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if (atomic_cmpset_64_i386(dst, *expect, src)) {
		return (1);
	} else {
		*expect = *dst;
		return (0);
	}
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i586(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (*expect)		/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

/*
 * With %edx:%eax set equal to %ecx:%ebx, cmpxchg8b either writes back
 * the value that memory already holds or loads the current value into
 * %edx:%eax; either way *p is read atomically without being changed.
 */
static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline int
atomic_fcmpset_64(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_fcmpset_64_i386(dst, expect, src));
	else
		return (atomic_fcmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t - v))
			break;
	}
}
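
/*
 * Illustrative sketch (not part of this header): the 64-bit helpers
 * above pick the i386 or i586 variant at run time based on CPUID_CX8,
 * so a caller can update a 64-bit counter without caring which CPU it
 * runs on.  "stats" is a hypothetical counter.
 *
 *	uint64_t old;
 *
 *	old = atomic_fetchadd_64(&stats, 1);
 */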

#endif /* _KERNEL */

#endif /* !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq", v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq", v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq", v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir", v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir", v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir", v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir", v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline int
atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
{

	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */
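
/*
 * Illustrative sketch (not part of this header): atomic_swap_int()
 * reads and replaces a word in a single step; the readandclear
 * operations below are defined in terms of it.  "event_mask" is a
 * hypothetical variable.
 *
 *	pending = atomic_swap_int(&event_mask, 0);
 */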

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
#define	atomic_testandset_acq_long	atomic_testandset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

#ifdef _KERNEL
/* Operations on 64-bit quad words. */
#define	atomic_cmpset_acq_64	atomic_cmpset_64
#define	atomic_cmpset_rel_64	atomic_cmpset_64
#define	atomic_fcmpset_acq_64	atomic_fcmpset_64
#define	atomic_fcmpset_rel_64	atomic_fcmpset_64
#define	atomic_fetchadd_acq_64	atomic_fetchadd_64
#define	atomic_fetchadd_rel_64	atomic_fetchadd_64
#define	atomic_add_acq_64	atomic_add_64
#define	atomic_add_rel_64	atomic_add_64
#define	atomic_subtract_acq_64	atomic_subtract_64
#define	atomic_subtract_rel_64	atomic_subtract_64
#define	atomic_load_64		atomic_load_acq_64
#define	atomic_store_64		atomic_store_rel_64
#endif

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
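
/*
 * Illustrative sketch (not part of this header): the pointer operations
 * support lock-free linked structures, e.g. pushing onto a singly-linked
 * list head.  "head" and "node" are hypothetical.
 *
 *	uintptr_t top;
 *
 *	do {
 *		top = atomic_load_acq_ptr(&head);
 *		node->next = (struct node *)top;
 *	} while (atomic_cmpset_ptr(&head, top, (uintptr_t)node) == 0);
 */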

#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */