/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char*)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char*)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char*)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char*)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short*)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short*)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short*)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short*)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int*)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int*)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int*)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int*)(P) -= (V))
 * atomic_readandclear_int(P)	(return *(u_int*)P; *(u_int*)P = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long*)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long*)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long*)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long*)(P) -= (V))
 * atomic_readandclear_long(P)	(return *(u_long*)P; *(u_long*)P = 0;)
 */
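/*
 * Usage sketch for the plain operations above (illustrative only; the
 * "sc_flags" variable and "SCF_RUNNING" flag are hypothetical, not part
 * of this interface):
 *
 *	static volatile u_int sc_flags;
 *	#define SCF_RUNNING	0x0001
 *
 *	atomic_set_int(&sc_flags, SCF_RUNNING);		set the flag
 *	atomic_clear_int(&sc_flags, SCF_RUNNING);	clear it again
 *
 * On an SMP kernel each call expands to a single locked
 * read-modify-write instruction, so no other CPU can observe a
 * partial update.
 */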
/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE)
#define	ATOMIC_ASM(NAME, TYPE, OP, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);

int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);

#else /* !KLD_MODULE */
#if defined(SMP)
#if defined(LOCORE)
#define	MPLOCKED	lock ;
#else
#define	MPLOCKED	"lock ; "
#endif
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized to demarcate potential before-and-after
 * side effects if an interrupt or SMP collision were to occur.
 */
#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9)
/* egcs 1.1.2+ version */
#define	ATOMIC_ASM(NAME, TYPE, OP, V)			\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
			 : "=m" (*p)			\
			 : "0" (*p), "ir" (V));		\
}

/*
 * Atomic compare and set, used by the mutex functions.
 *
 *	if (*dst == exp) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success.
 */

#if defined(I386_CPU)
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	int res = exp;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%1,%3 ;		"
	"	jne	1f ;		"
	"	movl	%2,%3 ;		"
	"1:				"
	"	sete	%%al ;		"
	"	movzbl	%%al,%0 ;	"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "=a" (res)			/* 0 (result) */
	: "0" (exp),			/* 1 */
	  "r" (src),			/* 2 */
	  "m" (*(dst))			/* 3 */
	: "memory");

	return (res);
}
#else /* defined(I386_CPU) */
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	int res = exp;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %2,%3 ;	"
	"	setz	%%al ;		"
	"	movzbl	%%al,%0 ;	"
	"# atomic_cmpset_int"
	: "=a" (res)			/* 0 (result) */
	: "0" (exp),			/* 1 */
	  "r" (src),			/* 2 */
	  "m" (*(dst))			/* 3 */
	: "memory");

	return (res);
}
#endif /* defined(I386_CPU) */

#define	atomic_cmpset_acq_int	atomic_cmpset_int
#define	atomic_cmpset_rel_int	atomic_cmpset_int
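/*
 * Typical compare-and-set loop (a sketch, not kernel code; "p", "old",
 * and "max" are hypothetical names): atomically increment *p unless it
 * has already reached max.
 *
 *	u_int old;
 *
 *	do {
 *		old = *p;
 *		if (old == max)
 *			break;
 *	} while (atomic_cmpset_int(p, old, old + 1) == 0);
 *
 * If another CPU changes *p between the load and the cmpxchg, the
 * cmpset fails and the loop retries with the freshly observed value.
 */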
#else
/* gcc <= 2.8 version */
#define	ATOMIC_ASM(NAME, TYPE, OP, V)			\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
			 : "=m" (*p)			\
			 : "ir" (V));			\
}

#endif
#endif /* KLD_MODULE */

#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9)

/* egcs 1.1.2+ version */
ATOMIC_ASM(set,	     char,  "orb %b2,%0",   v)
ATOMIC_ASM(clear,    char,  "andb %b2,%0", ~v)
ATOMIC_ASM(add,	     char,  "addb %b2,%0",  v)
ATOMIC_ASM(subtract, char,  "subb %b2,%0",  v)

ATOMIC_ASM(set,	     short, "orw %w2,%0",   v)
ATOMIC_ASM(clear,    short, "andw %w2,%0", ~v)
ATOMIC_ASM(add,	     short, "addw %w2,%0",  v)
ATOMIC_ASM(subtract, short, "subw %w2,%0",  v)

ATOMIC_ASM(set,	     int,   "orl %2,%0",    v)
ATOMIC_ASM(clear,    int,   "andl %2,%0",  ~v)
ATOMIC_ASM(add,	     int,   "addl %2,%0",   v)
ATOMIC_ASM(subtract, int,   "subl %2,%0",   v)

ATOMIC_ASM(set,	     long,  "orl %2,%0",    v)
ATOMIC_ASM(clear,    long,  "andl %2,%0",  ~v)
ATOMIC_ASM(add,	     long,  "addl %2,%0",   v)
ATOMIC_ASM(subtract, long,  "subl %2,%0",   v)

#else

/* gcc <= 2.8 version */
ATOMIC_ASM(set,	     char,  "orb %1,%0",   v)
ATOMIC_ASM(clear,    char,  "andb %1,%0", ~v)
ATOMIC_ASM(add,	     char,  "addb %1,%0",  v)
ATOMIC_ASM(subtract, char,  "subb %1,%0",  v)

ATOMIC_ASM(set,	     short, "orw %1,%0",   v)
ATOMIC_ASM(clear,    short, "andw %1,%0", ~v)
ATOMIC_ASM(add,	     short, "addw %1,%0",  v)
ATOMIC_ASM(subtract, short, "subw %1,%0",  v)

ATOMIC_ASM(set,	     int,   "orl %1,%0",   v)
ATOMIC_ASM(clear,    int,   "andl %1,%0", ~v)
ATOMIC_ASM(add,	     int,   "addl %1,%0",  v)
ATOMIC_ASM(subtract, int,   "subl %1,%0",  v)

ATOMIC_ASM(set,	     long,  "orl %1,%0",   v)
ATOMIC_ASM(clear,    long,  "andl %1,%0", ~v)
ATOMIC_ASM(add,	     long,  "addl %1,%0",  v)
ATOMIC_ASM(subtract, long,  "subl %1,%0",  v)

#endif

#undef ATOMIC_ASM

#ifndef WANT_FUNCTIONS
#define	ATOMIC_ACQ_REL(NAME, TYPE)			\
static __inline void					\
atomic_##NAME##_acq_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");\
	atomic_##NAME##_##TYPE(p, v);			\
}							\
							\
static __inline void					\
atomic_##NAME##_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	atomic_##NAME##_##TYPE(p, v);			\
}

ATOMIC_ACQ_REL(set, char)
ATOMIC_ACQ_REL(clear, char)
ATOMIC_ACQ_REL(add, char)
ATOMIC_ACQ_REL(subtract, char)
ATOMIC_ACQ_REL(set, short)
ATOMIC_ACQ_REL(clear, short)
ATOMIC_ACQ_REL(add, short)
ATOMIC_ACQ_REL(subtract, short)
ATOMIC_ACQ_REL(set, int)
ATOMIC_ACQ_REL(clear, int)
ATOMIC_ACQ_REL(add, int)
ATOMIC_ACQ_REL(subtract, int)
ATOMIC_ACQ_REL(set, long)
ATOMIC_ACQ_REL(clear, long)
ATOMIC_ACQ_REL(add, long)
ATOMIC_ACQ_REL(subtract, long)

#undef ATOMIC_ACQ_REL
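/*
 * A minimal spin-lock sketch built on the acquire/release variants
 * above (hypothetical; real kernel code should use the mutex
 * primitives, which are implemented on top of these):
 *
 *	static volatile u_int slock;		0 = free, 1 = held
 *
 *	while (atomic_cmpset_acq_int(&slock, 0, 1) == 0)
 *		;				spin until acquired
 *	... critical section ...
 *	atomic_store_rel_int(&slock, 0);	release (defined below)
 *
 * The acquire variant keeps the critical section from appearing to
 * begin before the lock is taken; the release store keeps it from
 * appearing to end after the lock is dropped.
 */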
/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)				\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");\
	return (*p);					\
}							\
							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	*p = v;						\
	__asm __volatile("" : : : "memory");		\
}

ATOMIC_STORE_LOAD(char)
ATOMIC_STORE_LOAD(short)
ATOMIC_STORE_LOAD(int)
ATOMIC_STORE_LOAD(long)

#undef ATOMIC_STORE_LOAD
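/*
 * The load_acq/store_rel pair is intended for one-way handoffs such as
 * the producer/consumer pattern below (a sketch; "data", "ready",
 * compute(), and consume() are hypothetical):
 *
 *	producer:
 *		data = compute();
 *		atomic_store_rel_int(&ready, 1);	publish
 *
 *	consumer:
 *		while (atomic_load_acq_int(&ready) == 0)
 *			;				wait
 *		consume(data);				now safe to read
 *
 * The release store orders the write of "data" before the write of
 * "ready"; the acquire load orders the read of "ready" before the
 * read of "data".
 */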
static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{

	return (
	    atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, (u_int)src));
}

#define	atomic_cmpset_acq_ptr	atomic_cmpset_ptr
#define	atomic_cmpset_rel_ptr	atomic_cmpset_ptr

static __inline void *
atomic_load_acq_ptr(volatile void *p)
{
	return ((void *)atomic_load_acq_int((volatile u_int *)p));
}

static __inline void
atomic_store_rel_ptr(volatile void *p, void *v)
{
	atomic_store_rel_int((volatile u_int *)p, (u_int)v);
}

#define	ATOMIC_PTR(NAME)				\
static __inline void					\
atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_int((volatile u_int *)p, v);	\
}							\
							\
static __inline void					\
atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_acq_int((volatile u_int *)p, v);\
}							\
							\
static __inline void					\
atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_rel_int((volatile u_int *)p, v);\
}

ATOMIC_PTR(set)
ATOMIC_PTR(clear)
ATOMIC_PTR(add)
ATOMIC_PTR(subtract)

#undef ATOMIC_PTR

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile(
	"	xorl	%0,%0 ;		"
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile(
	"	xorl	%0,%0 ;		"
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}
#endif /* !WANT_FUNCTIONS */

#endif /* ! _MACHINE_ATOMIC_H_ */