1069e9bc1SDoug Rabson /*- 2069e9bc1SDoug Rabson * Copyright (c) 1998 Doug Rabson 3069e9bc1SDoug Rabson * All rights reserved. 4069e9bc1SDoug Rabson * 5069e9bc1SDoug Rabson * Redistribution and use in source and binary forms, with or without 6069e9bc1SDoug Rabson * modification, are permitted provided that the following conditions 7069e9bc1SDoug Rabson * are met: 8069e9bc1SDoug Rabson * 1. Redistributions of source code must retain the above copyright 9069e9bc1SDoug Rabson * notice, this list of conditions and the following disclaimer. 10069e9bc1SDoug Rabson * 2. Redistributions in binary form must reproduce the above copyright 11069e9bc1SDoug Rabson * notice, this list of conditions and the following disclaimer in the 12069e9bc1SDoug Rabson * documentation and/or other materials provided with the distribution. 13069e9bc1SDoug Rabson * 14069e9bc1SDoug Rabson * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 15069e9bc1SDoug Rabson * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16069e9bc1SDoug Rabson * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 17069e9bc1SDoug Rabson * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 18069e9bc1SDoug Rabson * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 19069e9bc1SDoug Rabson * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 20069e9bc1SDoug Rabson * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 21069e9bc1SDoug Rabson * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 22069e9bc1SDoug Rabson * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 23069e9bc1SDoug Rabson * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 24069e9bc1SDoug Rabson * SUCH DAMAGE. 25069e9bc1SDoug Rabson * 26c3aac50fSPeter Wemm * $FreeBSD$ 27069e9bc1SDoug Rabson */ 28069e9bc1SDoug Rabson #ifndef _MACHINE_ATOMIC_H_ 29069e9bc1SDoug Rabson #define _MACHINE_ATOMIC_H_ 30069e9bc1SDoug Rabson 31a5f50ef9SJoerg Wunsch #ifndef _SYS_CDEFS_H_ 32a5f50ef9SJoerg Wunsch #error this file needs sys/cdefs.h as a prerequisite 33a5f50ef9SJoerg Wunsch #endif 34a5f50ef9SJoerg Wunsch 35fa9f322dSKonstantin Belousov #define mb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc") 36fa9f322dSKonstantin Belousov #define wmb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc") 37fa9f322dSKonstantin Belousov #define rmb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc") 38db7f0b97SKip Macy 39069e9bc1SDoug Rabson /* 40f28e1c8fSBruce Evans * Various simple operations on memory, each of which is atomic in the 41f28e1c8fSBruce Evans * presence of interrupts and multiple processors. 42069e9bc1SDoug Rabson * 4347b8bc92SAlan Cox * atomic_set_char(P, V) (*(u_char *)(P) |= (V)) 4447b8bc92SAlan Cox * atomic_clear_char(P, V) (*(u_char *)(P) &= ~(V)) 4547b8bc92SAlan Cox * atomic_add_char(P, V) (*(u_char *)(P) += (V)) 4647b8bc92SAlan Cox * atomic_subtract_char(P, V) (*(u_char *)(P) -= (V)) 4747b8bc92SAlan Cox * 4847b8bc92SAlan Cox * atomic_set_short(P, V) (*(u_short *)(P) |= (V)) 4947b8bc92SAlan Cox * atomic_clear_short(P, V) (*(u_short *)(P) &= ~(V)) 5047b8bc92SAlan Cox * atomic_add_short(P, V) (*(u_short *)(P) += (V)) 5147b8bc92SAlan Cox * atomic_subtract_short(P, V) (*(u_short *)(P) -= (V)) 5247b8bc92SAlan Cox * 5347b8bc92SAlan Cox * atomic_set_int(P, V) (*(u_int *)(P) |= (V)) 5447b8bc92SAlan Cox * atomic_clear_int(P, V) (*(u_int *)(P) &= ~(V)) 5547b8bc92SAlan Cox * atomic_add_int(P, V) (*(u_int *)(P) += (V)) 5647b8bc92SAlan Cox * atomic_subtract_int(P, V) (*(u_int *)(P) -= (V)) 57f28e1c8fSBruce Evans * atomic_readandclear_int(P) (return (*(u_int *)(P)); *(u_int *)(P) = 0;) 5847b8bc92SAlan Cox * 5947b8bc92SAlan Cox * atomic_set_long(P, V) (*(u_long *)(P) |= (V)) 6047b8bc92SAlan Cox * atomic_clear_long(P, V) (*(u_long *)(P) &= ~(V)) 6147b8bc92SAlan Cox * atomic_add_long(P, V) (*(u_long *)(P) += (V)) 6247b8bc92SAlan Cox * atomic_subtract_long(P, V) (*(u_long *)(P) -= (V)) 63f28e1c8fSBruce Evans * atomic_readandclear_long(P) (return (*(u_long *)(P)); *(u_long *)(P) = 0;) 64069e9bc1SDoug Rabson */ 65069e9bc1SDoug Rabson 6647b8bc92SAlan Cox /* 6708c40841SAlan Cox * The above functions are expanded inline in the statically-linked 6808c40841SAlan Cox * kernel. Lock prefixes are generated if an SMP kernel is being 6908c40841SAlan Cox * built. 7008c40841SAlan Cox * 7108c40841SAlan Cox * Kernel modules call real functions which are built into the kernel. 7208c40841SAlan Cox * This allows kernel modules to be portable between UP and SMP systems. 7347b8bc92SAlan Cox */ 7448281036SJohn Baldwin #if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM) 75e4e991e1SJohn Baldwin #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 7686d2e48cSAttilio Rao void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); \ 7786d2e48cSAttilio Rao void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 7808c40841SAlan Cox 79065b12a7SPoul-Henning Kamp int atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src); 803c2bc2bfSJohn Baldwin u_int atomic_fetchadd_int(volatile u_int *p, u_int v); 81819e370cSPoul-Henning Kamp 82fa9f322dSKonstantin Belousov #define ATOMIC_LOAD(TYPE, LOP) \ 83fa9f322dSKonstantin Belousov u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p) 84fa9f322dSKonstantin Belousov #define ATOMIC_STORE(TYPE) \ 858306a37bSMark Murray void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 868a6b1c8fSJohn Baldwin 8748281036SJohn Baldwin #else /* !KLD_MODULE && __GNUCLIKE_ASM */ 884c5aee92SMark Murray 892a89a48fSJohn Baldwin /* 90f28e1c8fSBruce Evans * For userland, always use lock prefixes so that the binaries will run 91f28e1c8fSBruce Evans * on both SMP and !SMP systems. 922a89a48fSJohn Baldwin */ 932a89a48fSJohn Baldwin #if defined(SMP) || !defined(_KERNEL) 947e4277e5SBruce Evans #define MPLOCKED "lock ; " 95d2f22d70SBruce Evans #else 9647b8bc92SAlan Cox #define MPLOCKED 97d2f22d70SBruce Evans #endif 98069e9bc1SDoug Rabson 9947b8bc92SAlan Cox /* 10086d2e48cSAttilio Rao * The assembly is volatilized to avoid code chunk removal by the compiler. 10186d2e48cSAttilio Rao * GCC aggressively reorders operations and memory clobbering is necessary 10286d2e48cSAttilio Rao * in order to avoid that for memory barriers. 10347b8bc92SAlan Cox */ 104e4e991e1SJohn Baldwin #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 10547b8bc92SAlan Cox static __inline void \ 10603e3bc8eSAlan Cox atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 10747b8bc92SAlan Cox { \ 1087e4277e5SBruce Evans __asm __volatile(MPLOCKED OP \ 10980d52f16SJohn Baldwin : "=m" (*p) \ 1107222d2fbSKonstantin Belousov : CONS (V), "m" (*p) \ 1117222d2fbSKonstantin Belousov : "cc"); \ 1126d800f89SBruce Evans } \ 11386d2e48cSAttilio Rao \ 11486d2e48cSAttilio Rao static __inline void \ 11586d2e48cSAttilio Rao atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 11686d2e48cSAttilio Rao { \ 11786d2e48cSAttilio Rao __asm __volatile(MPLOCKED OP \ 11886d2e48cSAttilio Rao : "=m" (*p) \ 11986d2e48cSAttilio Rao : CONS (V), "m" (*p) \ 1207222d2fbSKonstantin Belousov : "memory", "cc"); \ 12186d2e48cSAttilio Rao } \ 1226d800f89SBruce Evans struct __hack 1234c5aee92SMark Murray 124d521c6b9SJung-uk Kim #if defined(_KERNEL) && !defined(WANT_FUNCTIONS) 125d521c6b9SJung-uk Kim 126d521c6b9SJung-uk Kim /* I486 does not support SMP or CMPXCHG8B. */ 127d521c6b9SJung-uk Kim static __inline uint64_t 128d521c6b9SJung-uk Kim atomic_load_acq_64_i386(volatile uint64_t *p) 129d521c6b9SJung-uk Kim { 130d521c6b9SJung-uk Kim volatile uint32_t *high, *low; 131d521c6b9SJung-uk Kim uint64_t res; 132d521c6b9SJung-uk Kim 133d521c6b9SJung-uk Kim low = (volatile uint32_t *)p; 134d521c6b9SJung-uk Kim high = (volatile uint32_t *)p + 1; 135d521c6b9SJung-uk Kim __asm __volatile( 136d521c6b9SJung-uk Kim " pushfl ; " 137d521c6b9SJung-uk Kim " cli ; " 138d521c6b9SJung-uk Kim " movl %1,%%eax ; " 139d521c6b9SJung-uk Kim " movl %2,%%edx ; " 140d521c6b9SJung-uk Kim " popfl" 141d521c6b9SJung-uk Kim : "=&A" (res) /* 0 */ 142d521c6b9SJung-uk Kim : "m" (*low), /* 1 */ 143d521c6b9SJung-uk Kim "m" (*high) /* 2 */ 144d521c6b9SJung-uk Kim : "memory"); 145d521c6b9SJung-uk Kim 146d521c6b9SJung-uk Kim return (res); 147d521c6b9SJung-uk Kim } 148d521c6b9SJung-uk Kim 149d521c6b9SJung-uk Kim static __inline void 150d521c6b9SJung-uk Kim atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v) 151d521c6b9SJung-uk Kim { 152d521c6b9SJung-uk Kim volatile uint32_t *high, *low; 153d521c6b9SJung-uk Kim 154d521c6b9SJung-uk Kim low = (volatile uint32_t *)p; 155d521c6b9SJung-uk Kim high = (volatile uint32_t *)p + 1; 156d521c6b9SJung-uk Kim __asm __volatile( 157d521c6b9SJung-uk Kim " pushfl ; " 158d521c6b9SJung-uk Kim " cli ; " 159d521c6b9SJung-uk Kim " movl %%eax,%0 ; " 160d521c6b9SJung-uk Kim " movl %%edx,%1 ; " 161d521c6b9SJung-uk Kim " popfl" 162d521c6b9SJung-uk Kim : "=m" (*low), /* 0 */ 163d521c6b9SJung-uk Kim "=m" (*high) /* 1 */ 164d521c6b9SJung-uk Kim : "A" (v) /* 2 */ 165d521c6b9SJung-uk Kim : "memory"); 166d521c6b9SJung-uk Kim } 167d521c6b9SJung-uk Kim 168d521c6b9SJung-uk Kim static __inline uint64_t 169d521c6b9SJung-uk Kim atomic_load_acq_64_i586(volatile uint64_t *p) 170d521c6b9SJung-uk Kim { 171d521c6b9SJung-uk Kim uint64_t res; 172d521c6b9SJung-uk Kim 173d521c6b9SJung-uk Kim __asm __volatile( 174d521c6b9SJung-uk Kim " movl %%ebx,%%eax ; " 175d521c6b9SJung-uk Kim " movl %%ecx,%%edx ; " 176d521c6b9SJung-uk Kim " " MPLOCKED " " 177d521c6b9SJung-uk Kim " cmpxchg8b %2" 178d521c6b9SJung-uk Kim : "=&A" (res), /* 0 */ 179d521c6b9SJung-uk Kim "=m" (*p) /* 1 */ 180d521c6b9SJung-uk Kim : "m" (*p) /* 2 */ 181d521c6b9SJung-uk Kim : "memory", "cc"); 182d521c6b9SJung-uk Kim 183d521c6b9SJung-uk Kim return (res); 184d521c6b9SJung-uk Kim } 185d521c6b9SJung-uk Kim 186d521c6b9SJung-uk Kim static __inline void 187d521c6b9SJung-uk Kim atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v) 188d521c6b9SJung-uk Kim { 189d521c6b9SJung-uk Kim 190d521c6b9SJung-uk Kim __asm __volatile( 191d521c6b9SJung-uk Kim " movl %%eax,%%ebx ; " 192d521c6b9SJung-uk Kim " movl %%edx,%%ecx ; " 193d521c6b9SJung-uk Kim "1: " 194d521c6b9SJung-uk Kim " " MPLOCKED " " 195d521c6b9SJung-uk Kim " cmpxchg8b %2 ; " 196d521c6b9SJung-uk Kim " jne 1b" 197d521c6b9SJung-uk Kim : "=m" (*p), /* 0 */ 198d521c6b9SJung-uk Kim "+A" (v) /* 1 */ 199d521c6b9SJung-uk Kim : "m" (*p) /* 2 */ 200d521c6b9SJung-uk Kim : "ebx", "ecx", "memory", "cc"); 201d521c6b9SJung-uk Kim } 202d521c6b9SJung-uk Kim 203d521c6b9SJung-uk Kim #endif /* _KERNEL && !WANT_FUNCTIONS */ 204d521c6b9SJung-uk Kim 205819e370cSPoul-Henning Kamp /* 206819e370cSPoul-Henning Kamp * Atomic compare and set, used by the mutex functions 207819e370cSPoul-Henning Kamp * 208065b12a7SPoul-Henning Kamp * if (*dst == expect) *dst = src (all 32 bit words) 209819e370cSPoul-Henning Kamp * 210819e370cSPoul-Henning Kamp * Returns 0 on failure, non-zero on success 211819e370cSPoul-Henning Kamp */ 212819e370cSPoul-Henning Kamp 213f28e1c8fSBruce Evans #ifdef CPU_DISABLE_CMPXCHG 2144c5aee92SMark Murray 2158448afceSAttilio Rao static __inline int 216065b12a7SPoul-Henning Kamp atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src) 2178448afceSAttilio Rao { 2188448afceSAttilio Rao u_char res; 2198448afceSAttilio Rao 2208448afceSAttilio Rao __asm __volatile( 2218448afceSAttilio Rao " pushfl ; " 2228448afceSAttilio Rao " cli ; " 2238448afceSAttilio Rao " cmpl %3,%4 ; " 2248448afceSAttilio Rao " jne 1f ; " 2258448afceSAttilio Rao " movl %2,%1 ; " 2268448afceSAttilio Rao "1: " 2278448afceSAttilio Rao " sete %0 ; " 2288448afceSAttilio Rao " popfl ; " 2298448afceSAttilio Rao "# atomic_cmpset_int" 2308448afceSAttilio Rao : "=q" (res), /* 0 */ 2318448afceSAttilio Rao "=m" (*dst) /* 1 */ 2328448afceSAttilio Rao : "r" (src), /* 2 */ 233065b12a7SPoul-Henning Kamp "r" (expect), /* 3 */ 2348448afceSAttilio Rao "m" (*dst) /* 4 */ 2358448afceSAttilio Rao : "memory"); 2368448afceSAttilio Rao 2378448afceSAttilio Rao return (res); 2388448afceSAttilio Rao } 2394c5aee92SMark Murray 240f28e1c8fSBruce Evans #else /* !CPU_DISABLE_CMPXCHG */ 2414c5aee92SMark Murray 2428448afceSAttilio Rao static __inline int 243065b12a7SPoul-Henning Kamp atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src) 2448448afceSAttilio Rao { 2458448afceSAttilio Rao u_char res; 2468448afceSAttilio Rao 2478448afceSAttilio Rao __asm __volatile( 2488448afceSAttilio Rao " " MPLOCKED " " 2498448afceSAttilio Rao " cmpxchgl %2,%1 ; " 2508448afceSAttilio Rao " sete %0 ; " 2518448afceSAttilio Rao "1: " 2528448afceSAttilio Rao "# atomic_cmpset_int" 2538448afceSAttilio Rao : "=a" (res), /* 0 */ 2548448afceSAttilio Rao "=m" (*dst) /* 1 */ 2558448afceSAttilio Rao : "r" (src), /* 2 */ 256065b12a7SPoul-Henning Kamp "a" (expect), /* 3 */ 2578448afceSAttilio Rao "m" (*dst) /* 4 */ 2587222d2fbSKonstantin Belousov : "memory", "cc"); 2598448afceSAttilio Rao 2608448afceSAttilio Rao return (res); 2618448afceSAttilio Rao } 2624c5aee92SMark Murray 263f28e1c8fSBruce Evans #endif /* CPU_DISABLE_CMPXCHG */ 2644c5aee92SMark Murray 2653c2bc2bfSJohn Baldwin /* 2663c2bc2bfSJohn Baldwin * Atomically add the value of v to the integer pointed to by p and return 2673c2bc2bfSJohn Baldwin * the previous value of *p. 2683c2bc2bfSJohn Baldwin */ 2693c2bc2bfSJohn Baldwin static __inline u_int 2703c2bc2bfSJohn Baldwin atomic_fetchadd_int(volatile u_int *p, u_int v) 2713c2bc2bfSJohn Baldwin { 2723c2bc2bfSJohn Baldwin 2733c2bc2bfSJohn Baldwin __asm __volatile( 2747e4277e5SBruce Evans " " MPLOCKED " " 2753c2bc2bfSJohn Baldwin " xaddl %0,%1 ; " 2763c2bc2bfSJohn Baldwin "# atomic_fetchadd_int" 277*ee93d117SJung-uk Kim : "+r" (v), /* 0 */ 2783c2bc2bfSJohn Baldwin "=m" (*p) /* 1 */ 2797222d2fbSKonstantin Belousov : "m" (*p) /* 2 */ 2807222d2fbSKonstantin Belousov : "cc"); 2813c2bc2bfSJohn Baldwin return (v); 2823c2bc2bfSJohn Baldwin } 2833c2bc2bfSJohn Baldwin 284fa9f322dSKonstantin Belousov /* 285fa9f322dSKonstantin Belousov * We assume that a = b will do atomic loads and stores. Due to the 286fa9f322dSKonstantin Belousov * IA32 memory model, a simple store guarantees release semantics. 287fa9f322dSKonstantin Belousov * 288fa9f322dSKonstantin Belousov * However, loads may pass stores, so for atomic_load_acq we have to 289fa9f322dSKonstantin Belousov * ensure a Store/Load barrier to do the load in SMP kernels. We use 290fa9f322dSKonstantin Belousov * "lock cmpxchg" as recommended by the AMD Software Optimization 291fa9f322dSKonstantin Belousov * Guide, and not mfence. For UP kernels, however, the cache of the 292fa9f322dSKonstantin Belousov * single processor is always consistent, so we only need to take care 293fa9f322dSKonstantin Belousov * of the compiler. 294fa9f322dSKonstantin Belousov */ 295fa9f322dSKonstantin Belousov #define ATOMIC_STORE(TYPE) \ 296fa9f322dSKonstantin Belousov static __inline void \ 297fa9f322dSKonstantin Belousov atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 298fa9f322dSKonstantin Belousov { \ 2993a473025SAttilio Rao __compiler_membar(); \ 300fa9f322dSKonstantin Belousov *p = v; \ 301fa9f322dSKonstantin Belousov } \ 302fa9f322dSKonstantin Belousov struct __hack 303fa9f322dSKonstantin Belousov 30490baa95fSJohn Baldwin #if defined(_KERNEL) && !defined(SMP) 3054c5aee92SMark Murray 306fa9f322dSKonstantin Belousov #define ATOMIC_LOAD(TYPE, LOP) \ 307ccbdd9eeSJohn Baldwin static __inline u_##TYPE \ 308ccbdd9eeSJohn Baldwin atomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 309ccbdd9eeSJohn Baldwin { \ 31086d2e48cSAttilio Rao u_##TYPE tmp; \ 31186d2e48cSAttilio Rao \ 31286d2e48cSAttilio Rao tmp = *p; \ 3133a473025SAttilio Rao __compiler_membar(); \ 31486d2e48cSAttilio Rao return (tmp); \ 315ccbdd9eeSJohn Baldwin } \ 3166d800f89SBruce Evans struct __hack 3174c5aee92SMark Murray 318f28e1c8fSBruce Evans #else /* !(_KERNEL && !SMP) */ 319ccbdd9eeSJohn Baldwin 320fa9f322dSKonstantin Belousov #define ATOMIC_LOAD(TYPE, LOP) \ 3219d979d89SJohn Baldwin static __inline u_##TYPE \ 3229d979d89SJohn Baldwin atomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 3239d979d89SJohn Baldwin { \ 3249d979d89SJohn Baldwin u_##TYPE res; \ 3259d979d89SJohn Baldwin \ 3267e4277e5SBruce Evans __asm __volatile(MPLOCKED LOP \ 327f28e1c8fSBruce Evans : "=a" (res), /* 0 */ \ 32880d52f16SJohn Baldwin "=m" (*p) /* 1 */ \ 32980d52f16SJohn Baldwin : "m" (*p) /* 2 */ \ 3307222d2fbSKonstantin Belousov : "memory", "cc"); \ 3319d979d89SJohn Baldwin \ 3329d979d89SJohn Baldwin return (res); \ 3339d979d89SJohn Baldwin } \ 3346d800f89SBruce Evans struct __hack 3354c5aee92SMark Murray 336f28e1c8fSBruce Evans #endif /* _KERNEL && !SMP */ 3374c5aee92SMark Murray 33848281036SJohn Baldwin #endif /* KLD_MODULE || !__GNUCLIKE_ASM */ 3398a6b1c8fSJohn Baldwin 3408306a37bSMark Murray ATOMIC_ASM(set, char, "orb %b1,%0", "iq", v); 3418306a37bSMark Murray ATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v); 3428306a37bSMark Murray ATOMIC_ASM(add, char, "addb %b1,%0", "iq", v); 3438306a37bSMark Murray ATOMIC_ASM(subtract, char, "subb %b1,%0", "iq", v); 3448a6b1c8fSJohn Baldwin 3458306a37bSMark Murray ATOMIC_ASM(set, short, "orw %w1,%0", "ir", v); 3468306a37bSMark Murray ATOMIC_ASM(clear, short, "andw %w1,%0", "ir", ~v); 3478306a37bSMark Murray ATOMIC_ASM(add, short, "addw %w1,%0", "ir", v); 3488306a37bSMark Murray ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v); 3498a6b1c8fSJohn Baldwin 3508306a37bSMark Murray ATOMIC_ASM(set, int, "orl %1,%0", "ir", v); 3518306a37bSMark Murray ATOMIC_ASM(clear, int, "andl %1,%0", "ir", ~v); 3528306a37bSMark Murray ATOMIC_ASM(add, int, "addl %1,%0", "ir", v); 3538306a37bSMark Murray ATOMIC_ASM(subtract, int, "subl %1,%0", "ir", v); 3548a6b1c8fSJohn Baldwin 3558306a37bSMark Murray ATOMIC_ASM(set, long, "orl %1,%0", "ir", v); 3568306a37bSMark Murray ATOMIC_ASM(clear, long, "andl %1,%0", "ir", ~v); 3578306a37bSMark Murray ATOMIC_ASM(add, long, "addl %1,%0", "ir", v); 3588306a37bSMark Murray ATOMIC_ASM(subtract, long, "subl %1,%0", "ir", v); 3599d979d89SJohn Baldwin 360fa9f322dSKonstantin Belousov ATOMIC_LOAD(char, "cmpxchgb %b0,%1"); 361fa9f322dSKonstantin Belousov ATOMIC_LOAD(short, "cmpxchgw %w0,%1"); 362fa9f322dSKonstantin Belousov ATOMIC_LOAD(int, "cmpxchgl %0,%1"); 363fa9f322dSKonstantin Belousov ATOMIC_LOAD(long, "cmpxchgl %0,%1"); 364fa9f322dSKonstantin Belousov 365fa9f322dSKonstantin Belousov ATOMIC_STORE(char); 366fa9f322dSKonstantin Belousov ATOMIC_STORE(short); 367fa9f322dSKonstantin Belousov ATOMIC_STORE(int); 368fa9f322dSKonstantin Belousov ATOMIC_STORE(long); 369ccbdd9eeSJohn Baldwin 3708a6b1c8fSJohn Baldwin #undef ATOMIC_ASM 371fa9f322dSKonstantin Belousov #undef ATOMIC_LOAD 372fa9f322dSKonstantin Belousov #undef ATOMIC_STORE 373ccbdd9eeSJohn Baldwin 374f28e1c8fSBruce Evans #ifndef WANT_FUNCTIONS 37548281036SJohn Baldwin 376d521c6b9SJung-uk Kim #ifdef _KERNEL 377d521c6b9SJung-uk Kim extern uint64_t (*atomic_load_acq_64)(volatile uint64_t *); 378d521c6b9SJung-uk Kim extern void (*atomic_store_rel_64)(volatile uint64_t *, uint64_t); 379d521c6b9SJung-uk Kim #endif 380d521c6b9SJung-uk Kim 38148281036SJohn Baldwin static __inline int 382065b12a7SPoul-Henning Kamp atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src) 38348281036SJohn Baldwin { 38448281036SJohn Baldwin 385065b12a7SPoul-Henning Kamp return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect, 38648281036SJohn Baldwin (u_int)src)); 38748281036SJohn Baldwin } 38848281036SJohn Baldwin 3896eb4157fSPawel Jakub Dawidek static __inline u_long 3906eb4157fSPawel Jakub Dawidek atomic_fetchadd_long(volatile u_long *p, u_long v) 3916eb4157fSPawel Jakub Dawidek { 3926eb4157fSPawel Jakub Dawidek 3936eb4157fSPawel Jakub Dawidek return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v)); 3946eb4157fSPawel Jakub Dawidek } 3956eb4157fSPawel Jakub Dawidek 39648281036SJohn Baldwin /* Read the current value and store a zero in the destination. */ 39748281036SJohn Baldwin #ifdef __GNUCLIKE_ASM 39848281036SJohn Baldwin 39948281036SJohn Baldwin static __inline u_int 400*ee93d117SJung-uk Kim atomic_readandclear_int(volatile u_int *p) 40148281036SJohn Baldwin { 402f28e1c8fSBruce Evans u_int res; 40348281036SJohn Baldwin 404f28e1c8fSBruce Evans res = 0; 40548281036SJohn Baldwin __asm __volatile( 40648281036SJohn Baldwin " xchgl %1,%0 ; " 40748281036SJohn Baldwin "# atomic_readandclear_int" 408f28e1c8fSBruce Evans : "+r" (res), /* 0 */ 409*ee93d117SJung-uk Kim "=m" (*p) /* 1 */ 410*ee93d117SJung-uk Kim : "m" (*p)); 41148281036SJohn Baldwin 412f28e1c8fSBruce Evans return (res); 41348281036SJohn Baldwin } 41448281036SJohn Baldwin 41548281036SJohn Baldwin static __inline u_long 416*ee93d117SJung-uk Kim atomic_readandclear_long(volatile u_long *p) 41748281036SJohn Baldwin { 418f28e1c8fSBruce Evans u_long res; 41948281036SJohn Baldwin 420f28e1c8fSBruce Evans res = 0; 42148281036SJohn Baldwin __asm __volatile( 42248281036SJohn Baldwin " xchgl %1,%0 ; " 42348281036SJohn Baldwin "# atomic_readandclear_long" 4240b194ec8SBruce Evans : "+r" (res), /* 0 */ 425*ee93d117SJung-uk Kim "=m" (*p) /* 1 */ 426*ee93d117SJung-uk Kim : "m" (*p)); 42748281036SJohn Baldwin 428f28e1c8fSBruce Evans return (res); 42948281036SJohn Baldwin } 43048281036SJohn Baldwin 43148281036SJohn Baldwin #else /* !__GNUCLIKE_ASM */ 43248281036SJohn Baldwin 433*ee93d117SJung-uk Kim u_int atomic_readandclear_int(volatile u_int *p); 434*ee93d117SJung-uk Kim u_long atomic_readandclear_long(volatile u_long *p); 43548281036SJohn Baldwin 43648281036SJohn Baldwin #endif /* __GNUCLIKE_ASM */ 43748281036SJohn Baldwin 43886d2e48cSAttilio Rao #define atomic_set_acq_char atomic_set_barr_char 43986d2e48cSAttilio Rao #define atomic_set_rel_char atomic_set_barr_char 44086d2e48cSAttilio Rao #define atomic_clear_acq_char atomic_clear_barr_char 44186d2e48cSAttilio Rao #define atomic_clear_rel_char atomic_clear_barr_char 44286d2e48cSAttilio Rao #define atomic_add_acq_char atomic_add_barr_char 44386d2e48cSAttilio Rao #define atomic_add_rel_char atomic_add_barr_char 44486d2e48cSAttilio Rao #define atomic_subtract_acq_char atomic_subtract_barr_char 44586d2e48cSAttilio Rao #define atomic_subtract_rel_char atomic_subtract_barr_char 4468a6b1c8fSJohn Baldwin 44786d2e48cSAttilio Rao #define atomic_set_acq_short atomic_set_barr_short 44886d2e48cSAttilio Rao #define atomic_set_rel_short atomic_set_barr_short 44986d2e48cSAttilio Rao #define atomic_clear_acq_short atomic_clear_barr_short 45086d2e48cSAttilio Rao #define atomic_clear_rel_short atomic_clear_barr_short 45186d2e48cSAttilio Rao #define atomic_add_acq_short atomic_add_barr_short 45286d2e48cSAttilio Rao #define atomic_add_rel_short atomic_add_barr_short 45386d2e48cSAttilio Rao #define atomic_subtract_acq_short atomic_subtract_barr_short 45486d2e48cSAttilio Rao #define atomic_subtract_rel_short atomic_subtract_barr_short 4558a6b1c8fSJohn Baldwin 45686d2e48cSAttilio Rao #define atomic_set_acq_int atomic_set_barr_int 45786d2e48cSAttilio Rao #define atomic_set_rel_int atomic_set_barr_int 45886d2e48cSAttilio Rao #define atomic_clear_acq_int atomic_clear_barr_int 45986d2e48cSAttilio Rao #define atomic_clear_rel_int atomic_clear_barr_int 46086d2e48cSAttilio Rao #define atomic_add_acq_int atomic_add_barr_int 46186d2e48cSAttilio Rao #define atomic_add_rel_int atomic_add_barr_int 46286d2e48cSAttilio Rao #define atomic_subtract_acq_int atomic_subtract_barr_int 46386d2e48cSAttilio Rao #define atomic_subtract_rel_int atomic_subtract_barr_int 4648448afceSAttilio Rao #define atomic_cmpset_acq_int atomic_cmpset_int 4658448afceSAttilio Rao #define atomic_cmpset_rel_int atomic_cmpset_int 4668a6b1c8fSJohn Baldwin 46786d2e48cSAttilio Rao #define atomic_set_acq_long atomic_set_barr_long 46886d2e48cSAttilio Rao #define atomic_set_rel_long atomic_set_barr_long 46986d2e48cSAttilio Rao #define atomic_clear_acq_long atomic_clear_barr_long 47086d2e48cSAttilio Rao #define atomic_clear_rel_long atomic_clear_barr_long 47186d2e48cSAttilio Rao #define atomic_add_acq_long atomic_add_barr_long 47286d2e48cSAttilio Rao #define atomic_add_rel_long atomic_add_barr_long 47386d2e48cSAttilio Rao #define atomic_subtract_acq_long atomic_subtract_barr_long 47486d2e48cSAttilio Rao #define atomic_subtract_rel_long atomic_subtract_barr_long 4758448afceSAttilio Rao #define atomic_cmpset_acq_long atomic_cmpset_long 4768448afceSAttilio Rao #define atomic_cmpset_rel_long atomic_cmpset_long 4778a6b1c8fSJohn Baldwin 47848281036SJohn Baldwin /* Operations on 8-bit bytes. */ 4798a6b1c8fSJohn Baldwin #define atomic_set_8 atomic_set_char 4808a6b1c8fSJohn Baldwin #define atomic_set_acq_8 atomic_set_acq_char 4818a6b1c8fSJohn Baldwin #define atomic_set_rel_8 atomic_set_rel_char 4828a6b1c8fSJohn Baldwin #define atomic_clear_8 atomic_clear_char 4838a6b1c8fSJohn Baldwin #define atomic_clear_acq_8 atomic_clear_acq_char 4848a6b1c8fSJohn Baldwin #define atomic_clear_rel_8 atomic_clear_rel_char 4858a6b1c8fSJohn Baldwin #define atomic_add_8 atomic_add_char 4868a6b1c8fSJohn Baldwin #define atomic_add_acq_8 atomic_add_acq_char 4878a6b1c8fSJohn Baldwin #define atomic_add_rel_8 atomic_add_rel_char 4888a6b1c8fSJohn Baldwin #define atomic_subtract_8 atomic_subtract_char 4898a6b1c8fSJohn Baldwin #define atomic_subtract_acq_8 atomic_subtract_acq_char 4908a6b1c8fSJohn Baldwin #define atomic_subtract_rel_8 atomic_subtract_rel_char 4918a6b1c8fSJohn Baldwin #define atomic_load_acq_8 atomic_load_acq_char 4928a6b1c8fSJohn Baldwin #define atomic_store_rel_8 atomic_store_rel_char 4938a6b1c8fSJohn Baldwin 49448281036SJohn Baldwin /* Operations on 16-bit words. */ 4958a6b1c8fSJohn Baldwin #define atomic_set_16 atomic_set_short 4968a6b1c8fSJohn Baldwin #define atomic_set_acq_16 atomic_set_acq_short 4978a6b1c8fSJohn Baldwin #define atomic_set_rel_16 atomic_set_rel_short 4988a6b1c8fSJohn Baldwin #define atomic_clear_16 atomic_clear_short 4998a6b1c8fSJohn Baldwin #define atomic_clear_acq_16 atomic_clear_acq_short 5008a6b1c8fSJohn Baldwin #define atomic_clear_rel_16 atomic_clear_rel_short 5018a6b1c8fSJohn Baldwin #define atomic_add_16 atomic_add_short 5028a6b1c8fSJohn Baldwin #define atomic_add_acq_16 atomic_add_acq_short 5038a6b1c8fSJohn Baldwin #define atomic_add_rel_16 atomic_add_rel_short 5048a6b1c8fSJohn Baldwin #define atomic_subtract_16 atomic_subtract_short 5058a6b1c8fSJohn Baldwin #define atomic_subtract_acq_16 atomic_subtract_acq_short 5068a6b1c8fSJohn Baldwin #define atomic_subtract_rel_16 atomic_subtract_rel_short 5078a6b1c8fSJohn Baldwin #define atomic_load_acq_16 atomic_load_acq_short 5088a6b1c8fSJohn Baldwin #define atomic_store_rel_16 atomic_store_rel_short 5098a6b1c8fSJohn Baldwin 51048281036SJohn Baldwin /* Operations on 32-bit double words. */ 5118a6b1c8fSJohn Baldwin #define atomic_set_32 atomic_set_int 5128a6b1c8fSJohn Baldwin #define atomic_set_acq_32 atomic_set_acq_int 5138a6b1c8fSJohn Baldwin #define atomic_set_rel_32 atomic_set_rel_int 5148a6b1c8fSJohn Baldwin #define atomic_clear_32 atomic_clear_int 5158a6b1c8fSJohn Baldwin #define atomic_clear_acq_32 atomic_clear_acq_int 5168a6b1c8fSJohn Baldwin #define atomic_clear_rel_32 atomic_clear_rel_int 5178a6b1c8fSJohn Baldwin #define atomic_add_32 atomic_add_int 5188a6b1c8fSJohn Baldwin #define atomic_add_acq_32 atomic_add_acq_int 5198a6b1c8fSJohn Baldwin #define atomic_add_rel_32 atomic_add_rel_int 5208a6b1c8fSJohn Baldwin #define atomic_subtract_32 atomic_subtract_int 5218a6b1c8fSJohn Baldwin #define atomic_subtract_acq_32 atomic_subtract_acq_int 5228a6b1c8fSJohn Baldwin #define atomic_subtract_rel_32 atomic_subtract_rel_int 5238a6b1c8fSJohn Baldwin #define atomic_load_acq_32 atomic_load_acq_int 5248a6b1c8fSJohn Baldwin #define atomic_store_rel_32 atomic_store_rel_int 5258a6b1c8fSJohn Baldwin #define atomic_cmpset_32 atomic_cmpset_int 5268a6b1c8fSJohn Baldwin #define atomic_cmpset_acq_32 atomic_cmpset_acq_int 5278a6b1c8fSJohn Baldwin #define atomic_cmpset_rel_32 atomic_cmpset_rel_int 5288a6b1c8fSJohn Baldwin #define atomic_readandclear_32 atomic_readandclear_int 5293c2bc2bfSJohn Baldwin #define atomic_fetchadd_32 atomic_fetchadd_int 5308a6b1c8fSJohn Baldwin 53148281036SJohn Baldwin /* Operations on pointers. */ 5326f0f8ccaSDag-Erling Smørgrav #define atomic_set_ptr(p, v) \ 5336f0f8ccaSDag-Erling Smørgrav atomic_set_int((volatile u_int *)(p), (u_int)(v)) 5346f0f8ccaSDag-Erling Smørgrav #define atomic_set_acq_ptr(p, v) \ 5356f0f8ccaSDag-Erling Smørgrav atomic_set_acq_int((volatile u_int *)(p), (u_int)(v)) 5366f0f8ccaSDag-Erling Smørgrav #define atomic_set_rel_ptr(p, v) \ 5376f0f8ccaSDag-Erling Smørgrav atomic_set_rel_int((volatile u_int *)(p), (u_int)(v)) 5386f0f8ccaSDag-Erling Smørgrav #define atomic_clear_ptr(p, v) \ 5396f0f8ccaSDag-Erling Smørgrav atomic_clear_int((volatile u_int *)(p), (u_int)(v)) 5406f0f8ccaSDag-Erling Smørgrav #define atomic_clear_acq_ptr(p, v) \ 5416f0f8ccaSDag-Erling Smørgrav atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v)) 5426f0f8ccaSDag-Erling Smørgrav #define atomic_clear_rel_ptr(p, v) \ 5436f0f8ccaSDag-Erling Smørgrav atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v)) 5446f0f8ccaSDag-Erling Smørgrav #define atomic_add_ptr(p, v) \ 5456f0f8ccaSDag-Erling Smørgrav atomic_add_int((volatile u_int *)(p), (u_int)(v)) 5466f0f8ccaSDag-Erling Smørgrav #define atomic_add_acq_ptr(p, v) \ 5476f0f8ccaSDag-Erling Smørgrav atomic_add_acq_int((volatile u_int *)(p), (u_int)(v)) 5486f0f8ccaSDag-Erling Smørgrav #define atomic_add_rel_ptr(p, v) \ 5496f0f8ccaSDag-Erling Smørgrav atomic_add_rel_int((volatile u_int *)(p), (u_int)(v)) 5506f0f8ccaSDag-Erling Smørgrav #define atomic_subtract_ptr(p, v) \ 5516f0f8ccaSDag-Erling Smørgrav atomic_subtract_int((volatile u_int *)(p), (u_int)(v)) 5526f0f8ccaSDag-Erling Smørgrav #define atomic_subtract_acq_ptr(p, v) \ 5536f0f8ccaSDag-Erling Smørgrav atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v)) 5546f0f8ccaSDag-Erling Smørgrav #define atomic_subtract_rel_ptr(p, v) \ 5556f0f8ccaSDag-Erling Smørgrav atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v)) 5566f0f8ccaSDag-Erling Smørgrav #define atomic_load_acq_ptr(p) \ 5576f0f8ccaSDag-Erling Smørgrav atomic_load_acq_int((volatile u_int *)(p)) 5586f0f8ccaSDag-Erling Smørgrav #define atomic_store_rel_ptr(p, v) \ 5596f0f8ccaSDag-Erling Smørgrav atomic_store_rel_int((volatile u_int *)(p), (v)) 5606f0f8ccaSDag-Erling Smørgrav #define atomic_cmpset_ptr(dst, old, new) \ 5616f0f8ccaSDag-Erling Smørgrav atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new)) 5626f0f8ccaSDag-Erling Smørgrav #define atomic_cmpset_acq_ptr(dst, old, new) \ 5636c296ffaSBruce Evans atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \ 5646c296ffaSBruce Evans (u_int)(new)) 5656f0f8ccaSDag-Erling Smørgrav #define atomic_cmpset_rel_ptr(dst, old, new) \ 5666c296ffaSBruce Evans atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \ 5676c296ffaSBruce Evans (u_int)(new)) 5686f0f8ccaSDag-Erling Smørgrav #define atomic_readandclear_ptr(p) \ 5696f0f8ccaSDag-Erling Smørgrav atomic_readandclear_int((volatile u_int *)(p)) 570ccbdd9eeSJohn Baldwin 571f28e1c8fSBruce Evans #endif /* !WANT_FUNCTIONS */ 5726c296ffaSBruce Evans 573069e9bc1SDoug Rabson #endif /* !_MACHINE_ATOMIC_H_ */ 574