/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/atomic_common.h>

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x80

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%c0"
	    : : "i" (__OFFSETOF_MONITORBUF) : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */

/*
 * Always use lock prefixes.  The result is slightly less optimal for
 * UP systems, but that matters less now, and sometimes UP is emulated
 * over SMP.
 *
 * The assembly is marked volatile so the compiler cannot remove it,
 * and the barrier variants clobber memory because GCC otherwise
 * reorders memory operations aggressively.
 */
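/*
 * ATOMIC_ASM() below generates both a plain and a "_barr" (barrier)
 * variant of each read-modify-write primitive.  For illustration,
 * ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) expands roughly to:
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock; addl %1,%0"
 *		    : "+m" (*p) : "ir" (v) : "cc");
 *	}
 *
 * plus atomic_add_barr_int(), which adds a "memory" clobber so that it
 * can back the _acq/_rel aliases defined near the end of this file.
 */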
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg %3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg %3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");
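/*
 * Illustrative sketch of the intended fcmpset usage pattern; the
 * variables and compute() are hypothetical:
 *
 *	u_int old, newv;
 *
 *	old = *p;
 *	do {
 *		newv = compute(old);
 *	} while (atomic_fcmpset_int(p, &old, newv) == 0);
 *
 * On failure, fcmpset writes the observed value of *p back into "old",
 * so the loop does not need an explicit reload as a cmpset loop would.
 */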
/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	lock; xaddl	%0,%1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btsl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btrl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
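/*
 * Example (illustrative only): atomic_fetchadd_int() returns the value
 * of the target before the addition, so it can hand out unique tickets
 * from a shared counter, and atomic_testandset_int() returns the
 * previous state of the bit, making it usable as a try-lock.  With
 * hypothetical variables "ticket" and "flags":
 *
 *	mine = atomic_fetchadd_int(&ticket, 1);
 *
 *	if (atomic_testandset_int(&flags, 0) == 0) {
 *		... bit 0 was clear and this caller now owns it ...
 *	}
 */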
/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */

#if defined(_KERNEL)
#define	__storeload_barrier()	__mbk()
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(const volatile u_##TYPE *p)		\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
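/*
 * Illustrative pairing of the release store and acquire load defined
 * above (the variables, and the consumer's use of cpu_spinwait(), are
 * hypothetical):
 *
 *	producer:
 *		data = 42;
 *		atomic_store_rel_int(&ready, 1);
 *
 *	consumer:
 *		while (atomic_load_acq_int(&ready) == 0)
 *			cpu_spinwait();
 *		... "data" is now guaranteed visible ...
 *
 * As noted above, the IA32 load and store already provide the hardware
 * ordering; the _acq/_rel variants add the compiler barrier.
 */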
#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(const volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(const volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/* The i486 does not support SMP or CMPXCHG8B. */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i386(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if (atomic_cmpset_64_i386(dst, *expect, src)) {
		return (1);
	} else {
		*expect = *dst;
		return (0);
	}
}

static __inline uint64_t
atomic_load_acq_64_i386(const volatile uint64_t *p)
{
	const volatile uint32_t *q;
	uint64_t res;

	q = (const volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}
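/*
 * Note that these i386 variants, including atomic_swap_64_i386() below,
 * achieve atomicity by disabling interrupts around two 32-bit accesses,
 * which suffices only because such CPUs are uniprocessor-only.
 */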
static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i586(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (*expect)		/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}
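/*
 * Example (illustrative sketch): a 64-bit read-modify-write loop built
 * on fcmpset, here maintaining a running maximum in a hypothetical
 * "hiwat" word via the atomic_fcmpset_64() dispatcher defined below:
 *
 *	uint64_t old;
 *
 *	old = *hiwat;
 *	while (old < val && atomic_fcmpset_64(hiwat, &old, val) == 0)
 *		continue;
 */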
/*
 * Architecturally, cmpxchg8b always writes some value back to '*p', so
 * it will trigger a #GP(0) on read-only mappings.
 */
static __inline uint64_t
atomic_load_acq_64_i586(const volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	lock; cmpxchg8b %1"
	: "=&A" (res)			/* 0 */
	: "m" (*p)			/* 1 */
	: "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline int
atomic_fcmpset_64(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_fcmpset_64_i386(dst, expect, src));
	else
		return (atomic_fcmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(const volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}
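/*
 * These wrappers pick the i386 or i586 implementation at run time from
 * CPUID_CX8.  Illustrative use: snapshotting a 64-bit counter on a
 * 32-bit CPU without a torn read ("stats" is hypothetical):
 *
 *	uint64_t snap;
 *
 *	snap = atomic_load_acq_64(&stats);
 */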
static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t - v))
			break;
	}
}

#endif /* _KERNEL */
ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline int
atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
{

	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}
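/*
 * Example (illustrative): atomic_testandset_long() as a claim operation
 * on a hypothetical allocation bitmap; note that the underlying
 * implementation masks the bit number to the 32-bit word width:
 *
 *	if (atomic_testandset_long(&bitmap, idx) == 0) {
 *		... bit "idx" was free and is now claimed ...
 *	}
 */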
/* Read the current value and store a new value in the destination. */
static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}
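/*
 * xchgl asserts the bus lock implicitly, so atomic_swap_int() needs no
 * lock prefix.  Example (illustrative): atomic_readandclear_int(p),
 * defined below as atomic_swap_int(p, 0), can drain a word of pending
 * event bits in one step ("pending" is hypothetical):
 *
 *	events = atomic_swap_int(&pending, 0);
 */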
#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
#define	atomic_testandset_acq_long	atomic_testandset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char
/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int
#ifdef _KERNEL
/* Operations on 64-bit quad words. */
#define	atomic_cmpset_acq_64	atomic_cmpset_64
#define	atomic_cmpset_rel_64	atomic_cmpset_64
#define	atomic_fcmpset_acq_64	atomic_fcmpset_64
#define	atomic_fcmpset_rel_64	atomic_fcmpset_64
#define	atomic_fetchadd_acq_64	atomic_fetchadd_64
#define	atomic_fetchadd_rel_64	atomic_fetchadd_64
#define	atomic_add_acq_64	atomic_add_64
#define	atomic_add_rel_64	atomic_add_64
#define	atomic_subtract_acq_64	atomic_subtract_64
#define	atomic_subtract_rel_64	atomic_subtract_64
#define	atomic_load_64		atomic_load_acq_64
#define	atomic_store_64		atomic_store_rel_64
#endif

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((const volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
#define	atomic_testandclear_ptr(p, val) \
	atomic_testandclear_int((volatile u_int *)(p), (val))
#define	atomic_testandset_ptr(p, val) \
	atomic_testandset_int((volatile u_int *)(p), (val))
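/*
 * Since pointers are 32 bits here, the pointer operations are plain
 * casts to the int operations.  Example (illustrative sketch, with the
 * usual uintptr_t casts elided): a lock-free singly-linked push using
 * atomic_fcmpset_ptr() on a hypothetical list head:
 *
 *	old = head;
 *	do {
 *		item->next = old;
 *	} while (atomic_fcmpset_ptr(&head, &old, item) == 0);
 */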
#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */