/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_readandclear_int(P)	(return *(u_int *)P; *(u_int *)P = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_readandclear_long(P)	(return *(u_long *)P; *(u_long *)P = 0;)
 */

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
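
/*
 * Illustrative note (not part of the original interface description):
 * callers pass a pointer to the shared word plus the value to apply,
 * for example
 *
 *	atomic_add_int(&refcount, 1);
 *	pending = atomic_readandclear_int(&flags);
 *
 * where "refcount", "flags" and "pending" are hypothetical u_int
 * variables shared between CPUs or with interrupt handlers.
 */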
#if defined(KLD_MODULE) || !(defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE))
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
int	atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src);

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE && __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */

/*
 * For userland, assume the SMP case and use lock prefixes so that
 * the binaries will run on both types of systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	lock ;
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized to demark potential before-and-after side
 * effects if an interrupt or SMP collision were to occur.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(__XSTRING(MPLOCKED) OP		\
	: "+m" (*p)					\
	: CONS (V));					\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == exp) *dst = src (all 32 and 64 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	int res = exp;

	__asm __volatile (
	"	" __XSTRING(MPLOCKED) "	"
	"	cmpxchgl %1,%2 ;	"
	"	setz	%%al ;		"
	"	movzbl	%%al,%0 ;	"
	"1:				"
	"# atomic_cmpset_int"
	: "+a" (res)			/* 0 (result) */
	: "r" (src),			/* 1 */
	  "m" (*(dst))			/* 2 */
	: "memory");

	return (res);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
{
	long res = exp;

	__asm __volatile (
	"	" __XSTRING(MPLOCKED) "	"
	"	cmpxchgq %1,%2 ;	"
	"	setz	%%al ;		"
	"	movzbq	%%al,%0 ;	"
	"1:				"
	"# atomic_cmpset_long"
	: "+a" (res)			/* 0 (result) */
	: "r" (src),			/* 1 */
	  "m" (*(dst))			/* 2 */
	: "memory");

	return (res);
}

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(__XSTRING(MPLOCKED) LOP	\
	: "=a" (res),			/* 0 (result) */\
	  "+m" (*p)			/* 1 */		\
	: : "memory");					\
							\
	return (res);					\
}							\
							\
/*							\
 * The XCHG instruction asserts LOCK automagically.	\
 */							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(SOP				\
	: "+m" (*p),			/* 0 */		\
	  "+r" (v)			/* 1 */		\
	: : "memory");					\
}							\
struct __hack

#endif /* KLD_MODULE || !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */
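
/*
 * The invocations below instantiate the inline functions (or, in the
 * KLD_MODULE case, their prototypes).  As an illustrative sketch, the
 * ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) line expands to roughly:
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; addl %1,%0"
 *		: "+m" (*p)
 *		: "ir" (v));
 *	}
 *
 * with the "lock ;" prefix present only when MPLOCKED is non-empty.
 */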

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orq %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andq %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addq %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subq %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long,	"cmpxchgq %0,%1",  "xchgq %1,%0");

#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#if !defined(WANT_FUNCTIONS)

/* Read the current value and store a zero in the destination. */
#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile (
	"	xorl	%0,%0 ;		"
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile (
	"	xorq	%0,%0 ;		"
	"	xchgq	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}

#else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */

u_int	atomic_readandclear_int(volatile u_int *);
u_long	atomic_readandclear_long(volatile u_long *);

#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */
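
#ifdef ATOMIC_EXAMPLES
/*
 * Illustrative sketch only; this block and the ATOMIC_EXAMPLES guard are
 * not part of the original header.  It shows how the acquire/release
 * load and store defined above are typically paired: the writer fills
 * in a payload word and then sets a flag with release semantics, and
 * the reader spins on the flag with acquire semantics before consuming
 * the payload.  "payload" and "flag" are hypothetical shared words.
 */
static __inline void
atomic_example_publish(volatile u_int *payload, volatile u_int *flag, u_int v)
{

	*payload = v;			/* write the payload first */
	atomic_store_rel_int(flag, 1);	/* then publish it */
}

static __inline u_int
atomic_example_consume(volatile u_int *payload, volatile u_int *flag)
{

	while (atomic_load_acq_int(flag) == 0)
		;			/* wait for the writer */
	return (*payload);		/* now safe to read the payload */
}
#endif /* ATOMIC_EXAMPLES */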

/* Acquire and release variants are identical to the normal ones. */
#define	atomic_set_acq_char		atomic_set_char
#define	atomic_set_rel_char		atomic_set_char
#define	atomic_clear_acq_char		atomic_clear_char
#define	atomic_clear_rel_char		atomic_clear_char
#define	atomic_add_acq_char		atomic_add_char
#define	atomic_add_rel_char		atomic_add_char
#define	atomic_subtract_acq_char	atomic_subtract_char
#define	atomic_subtract_rel_char	atomic_subtract_char

#define	atomic_set_acq_short		atomic_set_short
#define	atomic_set_rel_short		atomic_set_short
#define	atomic_clear_acq_short		atomic_clear_short
#define	atomic_clear_rel_short		atomic_clear_short
#define	atomic_add_acq_short		atomic_add_short
#define	atomic_add_rel_short		atomic_add_short
#define	atomic_subtract_acq_short	atomic_subtract_short
#define	atomic_subtract_rel_short	atomic_subtract_short

#define	atomic_set_acq_int		atomic_set_int
#define	atomic_set_rel_int		atomic_set_int
#define	atomic_clear_acq_int		atomic_clear_int
#define	atomic_clear_rel_int		atomic_clear_int
#define	atomic_add_acq_int		atomic_add_int
#define	atomic_add_rel_int		atomic_add_int
#define	atomic_subtract_acq_int		atomic_subtract_int
#define	atomic_subtract_rel_int		atomic_subtract_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_long
#define	atomic_set_rel_long		atomic_set_long
#define	atomic_clear_acq_long		atomic_clear_long
#define	atomic_clear_rel_long		atomic_clear_long
#define	atomic_add_acq_long		atomic_add_long
#define	atomic_add_rel_long		atomic_add_long
#define	atomic_subtract_acq_long	atomic_subtract_long
#define	atomic_subtract_rel_long	atomic_subtract_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
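
#ifdef ATOMIC_EXAMPLES
/*
 * Illustrative sketch only; this block and the ATOMIC_EXAMPLES guard are
 * not part of the original header.  It shows the compare-and-set and
 * release-store primitives used in the spirit of the "used by the mutex
 * functions" note above: a minimal spin lock whose hypothetical lock
 * word "lockw" is 0 when free and 1 when held.
 */
static __inline void
atomic_example_lock(volatile u_int *lockw)
{

	while (atomic_cmpset_acq_int(lockw, 0, 1) == 0)
		;			/* spin until the 0 -> 1 transition succeeds */
}

static __inline void
atomic_example_unlock(volatile u_int *lockw)
{

	atomic_store_rel_int(lockw, 0);	/* release the lock word */
}
#endif /* ATOMIC_EXAMPLES */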

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int

/* Operations on pointers. */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_readandclear_ptr	atomic_readandclear_long

#endif	/* !defined(WANT_FUNCTIONS) */
#endif	/* ! _MACHINE_ATOMIC_H_ */