/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/atomic_common.h>

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x80

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
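
/*
 * Illustrative sketch (sc_flags and SC_BUSY are hypothetical, not part
 * of this interface): a shared flags word maintained with the simple
 * operations listed above:
 *
 *	#define SC_BUSY	0x0001
 *	static volatile u_int sc_flags;
 *
 *	atomic_set_int(&sc_flags, SC_BUSY);	// atomic *p |= SC_BUSY
 *	// ...do the work...
 *	atomic_clear_int(&sc_flags, SC_BUSY);	// atomic *p &= ~SC_BUSY
 *
 * Note that the plain forms imply no memory ordering; use the
 * _acq/_rel variants defined further down when the flag guards other
 * data.
 */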
/*
 * Always use lock prefixes.  The result is slightly less optimal for
 * UP systems, but it matters less now, and sometimes UP is emulated
 * over SMP.
 *
 * The assembly is volatilized to avoid code chunk removal by the
 * compiler.  GCC aggressively reorders operations, so a memory clobber
 * is necessary to prevent that reordering across the memory barriers.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; " OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg %3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	lock; cmpxchg %3,%1 ;	"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");
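
/*
 * Illustrative sketch (sat_inc_int is a hypothetical helper): the
 * usual fcmpset retry loop, here a saturating increment.  On failure
 * fcmpset copies the current value of *p into "old", so the loop does
 * not need to re-read *p itself:
 *
 *	static __inline u_int
 *	sat_inc_int(volatile u_int *p)
 *	{
 *		u_int old;
 *
 *		old = *p;
 *		do {
 *			if (old == UINT_MAX)
 *				return (old);
 *		} while (!atomic_fcmpset_int(p, &old, old + 1));
 *		return (old + 1);
 *	}
 */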
/*
 * Atomically add the value of v to the integer pointed to by p and
 * return the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	lock; xaddl	%0,%1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btsl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	lock; btrl	%2,%1 ;	"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
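
/*
 * Illustrative sketch (once_done and do_initialization are
 * hypothetical): atomic_testandset_int takes a bit index (taken
 * modulo 32), not a mask, and returns the previous value of that bit,
 * which makes it a natural one-shot latch:
 *
 *	static volatile u_int once_done;
 *
 *	if (!atomic_testandset_int(&once_done, 0)) {
 *		// first caller only: bit 0 was clear and is now set
 *		do_initialization();
 *	}
 */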
/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */

#if defined(_KERNEL)
#define	__storeload_barrier()	__mbk()
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
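
/*
 * Illustrative sketch of why the Store/Load barrier matters (f0, f1,
 * r0 and r1 are hypothetical): in the classic flag handshake each CPU
 * stores its own flag and then loads the other's:
 *
 *	CPU 0:				CPU 1:
 *	atomic_store_rel_int(&f0, 1);	atomic_store_rel_int(&f1, 1);
 *	atomic_thread_fence_seq_cst();	atomic_thread_fence_seq_cst();
 *	r0 = atomic_load_acq_int(&f1);	r1 = atomic_load_acq_int(&f0);
 *
 * Without the fences each store may still sit in its CPU's store
 * buffer while the subsequent load executes, and both CPUs can read 0;
 * with them, at least one of r0 and r1 is guaranteed to be 1.
 */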
#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/*
 * The I486 supports neither SMP nor CMPXCHG8B, so the _i386 variants
 * below make the 64-bit accesses atomic by disabling interrupts around
 * plain 32-bit moves instead of using a lock prefix.
 */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i386(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if (atomic_cmpset_64_i386(dst, *expect, src)) {
		return (1);
	} else {
		*expect = *dst;
		return (0);
	}
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}
static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i586(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	lock; cmpxchg8b %1 ;	"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (*expect)		/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	lock; cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}
static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	lock; cmpxchg8b %0 ;	"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline int
atomic_fcmpset_64(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_fcmpset_64_i386(dst, expect, src));
	else
		return (atomic_fcmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}
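
/*
 * Illustrative sketch (stats_bytes and len are hypothetical): the
 * _i386/_i586 selection above is made at run time from cpu_feature,
 * so callers simply use the generic names and get cmpxchg8b whenever
 * the CPU provides it:
 *
 *	static volatile uint64_t stats_bytes;
 *
 *	atomic_add_64(&stats_bytes, (uint64_t)len);	// defined below
 *	total = atomic_load_acq_64(&stats_bytes);
 */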
static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t - v))
			break;
	}
}

#endif /* _KERNEL */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq", v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq", v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq", v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir", v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir", v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir", v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir", v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir", v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir", v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}
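
/*
 * Illustrative note (obj_refs is hypothetical): on i386 both u_int and
 * u_long are 32 bits wide, so the long variants here and below are
 * plain recasts to the int operations, e.g. with atomic_fetchadd_long
 * defined just below:
 *
 *	static volatile u_long obj_refs;
 *
 *	old = atomic_fetchadd_long(&obj_refs, 1);	// returns old value
 */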
static __inline int
atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
{

	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}
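
/*
 * Illustrative sketch (pending_events is hypothetical):
 * atomic_swap_int reads the old value and stores a new one in a
 * single atomic step, which is also how the readandclear operations
 * below are built:
 *
 *	static volatile u_int pending_events;
 *	u_int events;
 *
 *	events = atomic_swap_int(&pending_events, 0);	// consume all bits
 *	// ...process the bits captured in events...
 */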
#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
#define	atomic_testandset_acq_long	atomic_testandset_long
/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short
/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

#ifdef _KERNEL
/* Operations on 64-bit quad words. */
#define	atomic_cmpset_acq_64	atomic_cmpset_64
#define	atomic_cmpset_rel_64	atomic_cmpset_64
#define	atomic_fcmpset_acq_64	atomic_fcmpset_64
#define	atomic_fcmpset_rel_64	atomic_fcmpset_64
#define	atomic_fetchadd_acq_64	atomic_fetchadd_64
#define	atomic_fetchadd_rel_64	atomic_fetchadd_64
#define	atomic_add_acq_64	atomic_add_64
#define	atomic_add_rel_64	atomic_add_64
#define	atomic_subtract_acq_64	atomic_subtract_64
#define	atomic_subtract_rel_64	atomic_subtract_64
#define	atomic_load_64		atomic_load_acq_64
#define	atomic_store_64		atomic_store_rel_64
#endif
/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
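
/*
 * Illustrative sketch (struct item and push are hypothetical): on i386
 * pointers are 32 bits wide, so the operations above are plain recasts
 * to the int versions.  A typical use is a lock-free LIFO push:
 *
 *	struct item { struct item *next; };
 *	static struct item *volatile head;
 *
 *	static void
 *	push(struct item *it)
 *	{
 *
 *		it->next = head;
 *		while (!atomic_fcmpset_ptr(&head, &it->next, it))
 *			continue;	// failed fcmpset refreshed it->next
 *	}
 *
 * The matching pop would additionally have to cope with ABA reuse.
 */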

#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */