/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#define	mb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
#define	wmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
#define	rmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory")
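/*
 * All three barriers expand to a LOCKed no-op add on the word at the top
 * of the stack.  On x86 any LOCKed read-modify-write is a full fence (it
 * drains the store buffer and orders both loads and stores), so a single
 * instruction can implement mb(), rmb() and wmb() alike; presumably a
 * LOCKed add is used instead of MFENCE because MFENCE requires SSE2,
 * which not every i386-class CPU has.
 *
 * Usage sketch, with hypothetical variables that are not part of this
 * file:
 *
 *	shared_datum = compute();
 *	wmb();				(order the datum before the flag)
 *	shared_flag = 1;
 */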
/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
int	atomic_cmpset_barr_int(volatile u_int *dst, u_int exp, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized so that the compiler cannot discard it.
 * GCC aggressively reorders operations, so the barrier variants also
 * need an explicit memory clobber to keep other memory accesses from
 * being moved across them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p));				\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p)				\
	: "memory");					\
}							\
struct __hack
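/*
 * For reference (a sketch, not additional API): with the macro above,
 * the invocation ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) from later
 * in this file expands in an SMP or userland build to roughly
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; " "addl %1,%0"
 *		: "=m" (*p)
 *		: "ir" (v), "m" (*p));
 *	}
 *
 * plus an atomic_add_barr_int() twin that also clobbers "memory".  A UP
 * kernel build is identical except that MPLOCKED expands to nothing.
 */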
/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == exp) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */

#ifdef CPU_DISABLE_CMPXCHG

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	u_char res;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%3,%4 ;		"
	"	jne	1f ;		"
	"	movl	%2,%1 ;		"
	"1:				"
	"	sete	%0 ;		"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "r" (exp),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}

#else /* !CPU_DISABLE_CMPXCHG */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %2,%1 ;	"
	"	sete	%0 ;		"
	"1:				"
	"# atomic_cmpset_int"
	: "=a" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (exp),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}

#endif /* CPU_DISABLE_CMPXCHG */

#define	atomic_cmpset_barr_int	atomic_cmpset_int
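/*
 * Usage sketch (a hypothetical spinlock, not part of this file's API):
 * atomic_cmpset_int() and friends are the usual building block for
 * busy-wait locks.  The _acq/_rel names used here are aliases provided
 * near the end of this file.
 *
 *	static volatile u_int toy_lock_word;
 *
 *	static __inline void
 *	toy_lock(void)
 *	{
 *		while (atomic_cmpset_acq_int(&toy_lock_word, 0, 1) == 0)
 *			;			(spin until 0 -> 1 succeeds)
 *	}
 *
 *	static __inline void
 *	toy_unlock(void)
 *	{
 *		atomic_store_rel_int(&toy_lock_word, 0);
 *	}
 */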
/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0, %1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 (result) */
	  "=m" (*p)			/* 1 */
	: "m" (*p));			/* 2 */

	return (v);
}
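/*
 * Usage sketch (hypothetical counter, not part of this file's API):
 * because the previous value is returned, fetchadd can hand out unique
 * tickets; two racing callers are guaranteed to get different values:
 *
 *	static volatile u_int toy_next_id;
 *
 *	static __inline u_int
 *	toy_alloc_id(void)
 *	{
 *		return (atomic_fetchadd_int(&toy_next_id, 1));
 *	}
 */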
#if defined(_KERNEL) && !defined(SMP)

/*
 * We assume that a = b will do atomic loads and stores.  However, on a
 * PentiumPro or higher, reads may pass writes, so for that case we have
 * to use a serializing instruction (i.e. with LOCK) to do the load in
 * SMP kernels.  For UP kernels, however, the cache of the single processor
 * is always consistent, so we only need to take care of the compiler.
 */
#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE tmp;					\
							\
	tmp = *p;					\
	__asm __volatile("" : : : "memory");		\
	return (tmp);					\
}							\
							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("" : : : "memory");		\
	*p = v;						\
}							\
struct __hack

#else /* !(_KERNEL && !SMP) */

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(MPLOCKED LOP			\
	: "=a" (res),			/* 0 */		\
	  "=m" (*p)			/* 1 */		\
	: "m" (*p)			/* 2 */		\
	: "memory");					\
							\
	return (res);					\
}							\
							\
/*							\
 * The XCHG instruction asserts LOCK automagically.	\
 */							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(SOP				\
	: "=m" (*p),			/* 0 */		\
	  "+r" (v)			/* 1 */		\
	: "m" (*p)			/* 2 */		\
	: "memory");					\
}							\
struct __hack

#endif /* _KERNEL && !SMP */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");

#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD
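/*
 * Usage sketch for the load_acq/store_rel pairs generated above
 * (hypothetical producer/consumer, not part of this file's API):
 *
 *	producer:
 *		toy_payload = 42;
 *		atomic_store_rel_int(&toy_ready, 1);
 *
 *	consumer:
 *		while (atomic_load_acq_int(&toy_ready) == 0)
 *			;
 *		use(toy_payload);		(guaranteed to observe 42)
 *
 * The release store keeps the payload write from sinking below the flag
 * write; the acquire load keeps the payload read from rising above the
 * flag read.
 */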
#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
	    (u_int)src));
}

static __inline int
atomic_cmpset_barr_long(volatile u_long *dst, u_long exp, u_long src)
{

	return (atomic_cmpset_barr_int((volatile u_int *)dst, (u_int)exp,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

/* Read the current value and store a zero in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_readandclear_int(volatile u_int *addr);
u_long	atomic_readandclear_long(volatile u_long *addr);

#endif /* __GNUCLIKE_ASM */
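/*
 * Usage sketch (hypothetical event mask, not part of this file's API):
 * readandclear drains a pending-bit word in one shot without losing
 * bits to concurrent setters; bits set after the xchg simply stay in
 * the word for the next pass:
 *
 *	u_int pending;
 *
 *	pending = atomic_readandclear_int(&toy_event_mask);
 *	while (pending != 0) {
 *		(handle the lowest set bit)
 *		pending &= pending - 1;
 *	}
 */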
#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_barr_int
#define	atomic_cmpset_rel_int		atomic_cmpset_barr_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_barr_long
#define	atomic_cmpset_rel_long		atomic_cmpset_barr_long
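/*
 * All of the _acq and _rel variants above alias the same _barr
 * functions: on x86 every LOCKed read-modify-write is already a full
 * fence, so acquire and release semantics need no extra instructions,
 * only the compiler-level "memory" clobber that the _barr versions
 * carry.
 */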
/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))
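/*
 * Usage sketch (hypothetical lock-free list push, not part of this
 * file's API).  The pointer operations simply cast to u_int, which is
 * safe because pointers are 32 bits wide on i386:
 *
 *	struct toy_node { struct toy_node *next; };
 *	static struct toy_node *toy_head;
 *
 *	static __inline void
 *	toy_push(struct toy_node *n)
 *	{
 *		struct toy_node *old;
 *
 *		do {
 *			old = toy_head;
 *			n->next = old;
 *		} while (atomic_cmpset_ptr(&toy_head, old, n) == 0);
 *	}
 */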
#endif /* !WANT_FUNCTIONS */

#endif /* !_MACHINE_ATOMIC_H_ */