1 /*- 2 * Copyright (c) 2013 Andrew Turner <andrew@freebsd.org> 3 * All rights reserved. 4 * 5 * Redistribution and use in source and binary forms, with or without 6 * modification, are permitted provided that the following conditions 7 * are met: 8 * 1. Redistributions of source code must retain the above copyright 9 * notice, this list of conditions and the following disclaimer. 10 * 2. Redistributions in binary form must reproduce the above copyright 11 * notice, this list of conditions and the following disclaimer in the 12 * documentation and/or other materials provided with the distribution. 13 * 14 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 15 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 17 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 18 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 24 * SUCH DAMAGE. 25 * 26 * $FreeBSD$ 27 */ 28 29 #ifdef __arm__ 30 #include <arm/atomic.h> 31 #else /* !__arm__ */ 32 33 #ifndef _MACHINE_ATOMIC_H_ 34 #define _MACHINE_ATOMIC_H_ 35 36 #define isb() __asm __volatile("isb" : : : "memory") 37 38 /* 39 * Options for DMB and DSB: 40 * oshld Outer Shareable, load 41 * oshst Outer Shareable, store 42 * osh Outer Shareable, all 43 * nshld Non-shareable, load 44 * nshst Non-shareable, store 45 * nsh Non-shareable, all 46 * ishld Inner Shareable, load 47 * ishst Inner Shareable, store 48 * ish Inner Shareable, all 49 * ld Full system, load 50 * st Full system, store 51 * sy Full system, all 52 */ 53 #define dsb(opt) __asm __volatile("dsb " __STRING(opt) : : : "memory") 54 #define dmb(opt) __asm __volatile("dmb " __STRING(opt) : : : "memory") 55 56 #define mb() dmb(sy) /* Full system memory barrier all */ 57 #define wmb() dmb(st) /* Full system memory barrier store */ 58 #define rmb() dmb(ld) /* Full system memory barrier load */ 59 60 #ifdef _KERNEL 61 extern _Bool lse_supported; 62 #endif 63 64 #if defined(SAN_NEEDS_INTERCEPTORS) && !defined(SAN_RUNTIME) 65 #include <sys/atomic_san.h> 66 #else 67 68 #include <sys/atomic_common.h> 69 70 #ifdef _KERNEL 71 72 #ifdef LSE_ATOMICS 73 #define _ATOMIC_LSE_SUPPORTED 1 74 #else 75 #define _ATOMIC_LSE_SUPPORTED lse_supported 76 #endif 77 #else 78 #define _ATOMIC_LSE_SUPPORTED 0 79 #endif 80 81 #define _ATOMIC_OP_PROTO(t, op, bar, flav) \ 82 static __inline void \ 83 atomic_##op##_##bar##t##flav(volatile uint##t##_t *p, uint##t##_t val) 84 85 #define _ATOMIC_OP_IMPL(t, w, s, op, llsc_asm_op, lse_asm_op, pre, bar, a, l) \ 86 _ATOMIC_OP_PROTO(t, op, bar, _llsc) \ 87 { \ 88 uint##t##_t tmp; \ 89 int res; \ 90 \ 91 pre; \ 92 __asm __volatile( \ 93 "1: ld"#a"xr"#s" %"#w"0, [%2]\n" \ 94 " "#llsc_asm_op" %"#w"0, %"#w"0, %"#w"3\n" \ 95 " st"#l"xr"#s" %w1, %"#w"0, [%2]\n" \ 96 " cbnz %w1, 1b\n" \ 97 : "=&r"(tmp), "=&r"(res) \ 98 : "r" (p), "r" (val) \ 99 : "memory" \ 100 ); \ 101 } \ 102 \ 103 _ATOMIC_OP_PROTO(t, op, bar, _lse) \ 104 { \ 105 uint##t##_t tmp; \ 106 \ 107 pre; \ 108 __asm __volatile( \ 109 ".arch_extension lse\n" \ 110 "ld"#lse_asm_op#a#l#s" %"#w"2, %"#w"0, [%1]\n" \ 111 ".arch_extension nolse\n" \ 112 : "=r" (tmp) \ 113 : "r" (p), "r" (val) \ 114 : "memory" \ 115 ); \ 116 } \ 117 \ 118 _ATOMIC_OP_PROTO(t, op, bar, ) \ 119 { \ 120 if (_ATOMIC_LSE_SUPPORTED) \ 121 atomic_##op##_##bar##t##_lse(p, val); \ 122 else \ 123 atomic_##op##_##bar##t##_llsc(p, val); \ 124 } 125 126 #define __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, bar, a, l) \ 127 _ATOMIC_OP_IMPL(8, w, b, op, llsc_asm_op, lse_asm_op, pre, \ 128 bar, a, l) \ 129 _ATOMIC_OP_IMPL(16, w, h, op, llsc_asm_op, lse_asm_op, pre, \ 130 bar, a, l) \ 131 _ATOMIC_OP_IMPL(32, w, , op, llsc_asm_op, lse_asm_op, pre, \ 132 bar, a, l) \ 133 _ATOMIC_OP_IMPL(64, , , op, llsc_asm_op, lse_asm_op, pre, \ 134 bar, a, l) 135 136 #define _ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre) \ 137 __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, , , ) \ 138 __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, acq_, a, ) \ 139 __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, rel_, , l) 140 141 _ATOMIC_OP(add, add, add, ) 142 _ATOMIC_OP(clear, bic, clr, ) 143 _ATOMIC_OP(set, orr, set, ) 144 _ATOMIC_OP(subtract, add, add, val = -val) 145 146 #define _ATOMIC_CMPSET_PROTO(t, bar, flav) \ 147 static __inline int \ 148 atomic_cmpset_##bar##t##flav(volatile uint##t##_t *p, \ 149 uint##t##_t cmpval, uint##t##_t newval) 150 151 #define _ATOMIC_FCMPSET_PROTO(t, bar, flav) \ 152 static __inline int \ 153 atomic_fcmpset_##bar##t##flav(volatile uint##t##_t *p, \ 154 uint##t##_t *cmpval, uint##t##_t newval) 155 156 #define _ATOMIC_CMPSET_IMPL(t, w, s, bar, a, l) \ 157 _ATOMIC_CMPSET_PROTO(t, bar, _llsc) \ 158 { \ 159 uint##t##_t tmp; \ 160 int res; \ 161 \ 162 __asm __volatile( \ 163 "1: mov %w1, #1\n" \ 164 " ld"#a"xr"#s" %"#w"0, [%2]\n" \ 165 " cmp %"#w"0, %"#w"3\n" \ 166 " b.ne 2f\n" \ 167 " st"#l"xr"#s" %w1, %"#w"4, [%2]\n" \ 168 " cbnz %w1, 1b\n" \ 169 "2:" \ 170 : "=&r"(tmp), "=&r"(res) \ 171 : "r" (p), "r" (cmpval), "r" (newval) \ 172 : "cc", "memory" \ 173 ); \ 174 \ 175 return (!res); \ 176 } \ 177 \ 178 _ATOMIC_CMPSET_PROTO(t, bar, _lse) \ 179 { \ 180 uint##t##_t oldval; \ 181 int res; \ 182 \ 183 oldval = cmpval; \ 184 __asm __volatile( \ 185 ".arch_extension lse\n" \ 186 "cas"#a#l#s" %"#w"1, %"#w"4, [%3]\n" \ 187 "cmp %"#w"1, %"#w"2\n" \ 188 "cset %w0, eq\n" \ 189 ".arch_extension nolse\n" \ 190 : "=r" (res), "+&r" (cmpval) \ 191 : "r" (oldval), "r" (p), "r" (newval) \ 192 : "cc", "memory" \ 193 ); \ 194 \ 195 return (res); \ 196 } \ 197 \ 198 _ATOMIC_CMPSET_PROTO(t, bar, ) \ 199 { \ 200 if (_ATOMIC_LSE_SUPPORTED) \ 201 return (atomic_cmpset_##bar##t##_lse(p, cmpval, \ 202 newval)); \ 203 else \ 204 return (atomic_cmpset_##bar##t##_llsc(p, cmpval, \ 205 newval)); \ 206 } \ 207 \ 208 _ATOMIC_FCMPSET_PROTO(t, bar, _llsc) \ 209 { \ 210 uint##t##_t _cmpval, tmp; \ 211 int res; \ 212 \ 213 _cmpval = *cmpval; \ 214 __asm __volatile( \ 215 " mov %w1, #1\n" \ 216 " ld"#a"xr"#s" %"#w"0, [%2]\n" \ 217 " cmp %"#w"0, %"#w"3\n" \ 218 " b.ne 1f\n" \ 219 " st"#l"xr"#s" %w1, %"#w"4, [%2]\n" \ 220 "1:" \ 221 : "=&r"(tmp), "=&r"(res) \ 222 : "r" (p), "r" (_cmpval), "r" (newval) \ 223 : "cc", "memory" \ 224 ); \ 225 *cmpval = tmp; \ 226 \ 227 return (!res); \ 228 } \ 229 \ 230 _ATOMIC_FCMPSET_PROTO(t, bar, _lse) \ 231 { \ 232 uint##t##_t _cmpval, tmp; \ 233 int res; \ 234 \ 235 _cmpval = tmp = *cmpval; \ 236 __asm __volatile( \ 237 ".arch_extension lse\n" \ 238 "cas"#a#l#s" %"#w"1, %"#w"4, [%3]\n" \ 239 "cmp %"#w"1, %"#w"2\n" \ 240 "cset %w0, eq\n" \ 241 ".arch_extension nolse\n" \ 242 : "=r" (res), "+&r" (tmp) \ 243 : "r" (_cmpval), "r" (p), "r" (newval) \ 244 : "cc", "memory" \ 245 ); \ 246 *cmpval = tmp; \ 247 \ 248 return (res); \ 249 } \ 250 \ 251 _ATOMIC_FCMPSET_PROTO(t, bar, ) \ 252 { \ 253 if (_ATOMIC_LSE_SUPPORTED) \ 254 return (atomic_fcmpset_##bar##t##_lse(p, cmpval, \ 255 newval)); \ 256 else \ 257 return (atomic_fcmpset_##bar##t##_llsc(p, cmpval, \ 258 newval)); \ 259 } 260 261 #define _ATOMIC_CMPSET(bar, a, l) \ 262 _ATOMIC_CMPSET_IMPL(8, w, b, bar, a, l) \ 263 _ATOMIC_CMPSET_IMPL(16, w, h, bar, a, l) \ 264 _ATOMIC_CMPSET_IMPL(32, w, , bar, a, l) \ 265 _ATOMIC_CMPSET_IMPL(64, , , bar, a, l) 266 267 #define atomic_cmpset_8 atomic_cmpset_8 268 #define atomic_fcmpset_8 atomic_fcmpset_8 269 #define atomic_cmpset_16 atomic_cmpset_16 270 #define atomic_fcmpset_16 atomic_fcmpset_16 271 272 _ATOMIC_CMPSET( , , ) 273 _ATOMIC_CMPSET(acq_, a, ) 274 _ATOMIC_CMPSET(rel_, ,l) 275 276 #define _ATOMIC_FETCHADD_PROTO(t, flav) \ 277 static __inline uint##t##_t \ 278 atomic_fetchadd_##t##flav(volatile uint##t##_t *p, uint##t##_t val) 279 280 #define _ATOMIC_FETCHADD_IMPL(t, w) \ 281 _ATOMIC_FETCHADD_PROTO(t, _llsc) \ 282 { \ 283 uint##t##_t ret, tmp; \ 284 int res; \ 285 \ 286 __asm __volatile( \ 287 "1: ldxr %"#w"2, [%3]\n" \ 288 " add %"#w"0, %"#w"2, %"#w"4\n" \ 289 " stxr %w1, %"#w"0, [%3]\n" \ 290 " cbnz %w1, 1b\n" \ 291 : "=&r" (tmp), "=&r" (res), "=&r" (ret) \ 292 : "r" (p), "r" (val) \ 293 : "memory" \ 294 ); \ 295 \ 296 return (ret); \ 297 } \ 298 \ 299 _ATOMIC_FETCHADD_PROTO(t, _lse) \ 300 { \ 301 uint##t##_t ret; \ 302 \ 303 __asm __volatile( \ 304 ".arch_extension lse\n" \ 305 "ldadd %"#w"2, %"#w"0, [%1]\n" \ 306 ".arch_extension nolse\n" \ 307 : "=r" (ret) \ 308 : "r" (p), "r" (val) \ 309 : "memory" \ 310 ); \ 311 \ 312 return (ret); \ 313 } \ 314 \ 315 _ATOMIC_FETCHADD_PROTO(t, ) \ 316 { \ 317 if (_ATOMIC_LSE_SUPPORTED) \ 318 return (atomic_fetchadd_##t##_lse(p, val)); \ 319 else \ 320 return (atomic_fetchadd_##t##_llsc(p, val)); \ 321 } 322 323 _ATOMIC_FETCHADD_IMPL(32, w) 324 _ATOMIC_FETCHADD_IMPL(64, ) 325 326 #define _ATOMIC_SWAP_PROTO(t, flav) \ 327 static __inline uint##t##_t \ 328 atomic_swap_##t##flav(volatile uint##t##_t *p, uint##t##_t val) 329 330 #define _ATOMIC_READANDCLEAR_PROTO(t, flav) \ 331 static __inline uint##t##_t \ 332 atomic_readandclear_##t##flav(volatile uint##t##_t *p) 333 334 #define _ATOMIC_SWAP_IMPL(t, w, zreg) \ 335 _ATOMIC_SWAP_PROTO(t, _llsc) \ 336 { \ 337 uint##t##_t ret; \ 338 int res; \ 339 \ 340 __asm __volatile( \ 341 "1: ldxr %"#w"1, [%2]\n" \ 342 " stxr %w0, %"#w"3, [%2]\n" \ 343 " cbnz %w0, 1b\n" \ 344 : "=&r" (res), "=&r" (ret) \ 345 : "r" (p), "r" (val) \ 346 : "memory" \ 347 ); \ 348 \ 349 return (ret); \ 350 } \ 351 \ 352 _ATOMIC_SWAP_PROTO(t, _lse) \ 353 { \ 354 uint##t##_t ret; \ 355 \ 356 __asm __volatile( \ 357 ".arch_extension lse\n" \ 358 "swp %"#w"2, %"#w"0, [%1]\n" \ 359 ".arch_extension nolse\n" \ 360 : "=r" (ret) \ 361 : "r" (p), "r" (val) \ 362 : "memory" \ 363 ); \ 364 \ 365 return (ret); \ 366 } \ 367 \ 368 _ATOMIC_SWAP_PROTO(t, ) \ 369 { \ 370 if (_ATOMIC_LSE_SUPPORTED) \ 371 return (atomic_swap_##t##_lse(p, val)); \ 372 else \ 373 return (atomic_swap_##t##_llsc(p, val)); \ 374 } \ 375 \ 376 _ATOMIC_READANDCLEAR_PROTO(t, _llsc) \ 377 { \ 378 uint##t##_t ret; \ 379 int res; \ 380 \ 381 __asm __volatile( \ 382 "1: ldxr %"#w"1, [%2]\n" \ 383 " stxr %w0, "#zreg", [%2]\n" \ 384 " cbnz %w0, 1b\n" \ 385 : "=&r" (res), "=&r" (ret) \ 386 : "r" (p) \ 387 : "memory" \ 388 ); \ 389 \ 390 return (ret); \ 391 } \ 392 \ 393 _ATOMIC_READANDCLEAR_PROTO(t, _lse) \ 394 { \ 395 return (atomic_swap_##t##_lse(p, 0)); \ 396 } \ 397 \ 398 _ATOMIC_READANDCLEAR_PROTO(t, ) \ 399 { \ 400 if (_ATOMIC_LSE_SUPPORTED) \ 401 return (atomic_readandclear_##t##_lse(p)); \ 402 else \ 403 return (atomic_readandclear_##t##_llsc(p)); \ 404 } 405 406 _ATOMIC_SWAP_IMPL(32, w, wzr) 407 _ATOMIC_SWAP_IMPL(64, , xzr) 408 409 #define _ATOMIC_TEST_OP_PROTO(t, op, bar, flav) \ 410 static __inline int \ 411 atomic_testand##op##_##bar##t##flav(volatile uint##t##_t *p, u_int val) 412 413 #define _ATOMIC_TEST_OP_IMPL(t, w, op, llsc_asm_op, lse_asm_op, bar, a) \ 414 _ATOMIC_TEST_OP_PROTO(t, op, bar, _llsc) \ 415 { \ 416 uint##t##_t mask, old, tmp; \ 417 int res; \ 418 \ 419 mask = ((uint##t##_t)1) << (val & (t - 1)); \ 420 __asm __volatile( \ 421 "1: ld"#a"xr %"#w"2, [%3]\n" \ 422 " "#llsc_asm_op" %"#w"0, %"#w"2, %"#w"4\n" \ 423 " stxr %w1, %"#w"0, [%3]\n" \ 424 " cbnz %w1, 1b\n" \ 425 : "=&r" (tmp), "=&r" (res), "=&r" (old) \ 426 : "r" (p), "r" (mask) \ 427 : "memory" \ 428 ); \ 429 \ 430 return ((old & mask) != 0); \ 431 } \ 432 \ 433 _ATOMIC_TEST_OP_PROTO(t, op, bar, _lse) \ 434 { \ 435 uint##t##_t mask, old; \ 436 \ 437 mask = ((uint##t##_t)1) << (val & (t - 1)); \ 438 __asm __volatile( \ 439 ".arch_extension lse\n" \ 440 "ld"#lse_asm_op#a" %"#w"2, %"#w"0, [%1]\n" \ 441 ".arch_extension nolse\n" \ 442 : "=r" (old) \ 443 : "r" (p), "r" (mask) \ 444 : "memory" \ 445 ); \ 446 \ 447 return ((old & mask) != 0); \ 448 } \ 449 \ 450 _ATOMIC_TEST_OP_PROTO(t, op, bar, ) \ 451 { \ 452 if (_ATOMIC_LSE_SUPPORTED) \ 453 return (atomic_testand##op##_##bar##t##_lse(p, val)); \ 454 else \ 455 return (atomic_testand##op##_##bar##t##_llsc(p, val)); \ 456 } 457 458 #define _ATOMIC_TEST_OP(op, llsc_asm_op, lse_asm_op) \ 459 _ATOMIC_TEST_OP_IMPL(32, w, op, llsc_asm_op, lse_asm_op, , ) \ 460 _ATOMIC_TEST_OP_IMPL(32, w, op, llsc_asm_op, lse_asm_op, acq_, a) \ 461 _ATOMIC_TEST_OP_IMPL(64, , op, llsc_asm_op, lse_asm_op, , ) \ 462 _ATOMIC_TEST_OP_IMPL(64, , op, llsc_asm_op, lse_asm_op, acq_, a) 463 464 _ATOMIC_TEST_OP(clear, bic, clr) 465 _ATOMIC_TEST_OP(set, orr, set) 466 467 #define _ATOMIC_LOAD_ACQ_IMPL(t, w, s) \ 468 static __inline uint##t##_t \ 469 atomic_load_acq_##t(volatile uint##t##_t *p) \ 470 { \ 471 uint##t##_t ret; \ 472 \ 473 __asm __volatile( \ 474 "ldar"#s" %"#w"0, [%1]\n" \ 475 : "=&r" (ret) \ 476 : "r" (p) \ 477 : "memory"); \ 478 \ 479 return (ret); \ 480 } 481 482 #define atomic_load_acq_8 atomic_load_acq_8 483 #define atomic_load_acq_16 atomic_load_acq_16 484 _ATOMIC_LOAD_ACQ_IMPL(8, w, b) 485 _ATOMIC_LOAD_ACQ_IMPL(16, w, h) 486 _ATOMIC_LOAD_ACQ_IMPL(32, w, ) 487 _ATOMIC_LOAD_ACQ_IMPL(64, , ) 488 489 #define _ATOMIC_STORE_REL_IMPL(t, w, s) \ 490 static __inline void \ 491 atomic_store_rel_##t(volatile uint##t##_t *p, uint##t##_t val) \ 492 { \ 493 __asm __volatile( \ 494 "stlr"#s" %"#w"0, [%1]\n" \ 495 : \ 496 : "r" (val), "r" (p) \ 497 : "memory"); \ 498 } 499 500 _ATOMIC_STORE_REL_IMPL(8, w, b) 501 _ATOMIC_STORE_REL_IMPL(16, w, h) 502 _ATOMIC_STORE_REL_IMPL(32, w, ) 503 _ATOMIC_STORE_REL_IMPL(64, , ) 504 505 #define atomic_add_char atomic_add_8 506 #define atomic_fcmpset_char atomic_fcmpset_8 507 #define atomic_clear_char atomic_clear_8 508 #define atomic_cmpset_char atomic_cmpset_8 509 #define atomic_fetchadd_char atomic_fetchadd_8 510 #define atomic_readandclear_char atomic_readandclear_8 511 #define atomic_set_char atomic_set_8 512 #define atomic_swap_char atomic_swap_8 513 #define atomic_subtract_char atomic_subtract_8 514 #define atomic_testandclear_char atomic_testandclear_8 515 #define atomic_testandset_char atomic_testandset_8 516 517 #define atomic_add_acq_char atomic_add_acq_8 518 #define atomic_fcmpset_acq_char atomic_fcmpset_acq_8 519 #define atomic_clear_acq_char atomic_clear_acq_8 520 #define atomic_cmpset_acq_char atomic_cmpset_acq_8 521 #define atomic_load_acq_char atomic_load_acq_8 522 #define atomic_set_acq_char atomic_set_acq_8 523 #define atomic_subtract_acq_char atomic_subtract_acq_8 524 #define atomic_testandset_acq_char atomic_testandset_acq_8 525 526 #define atomic_add_rel_char atomic_add_rel_8 527 #define atomic_fcmpset_rel_char atomic_fcmpset_rel_8 528 #define atomic_clear_rel_char atomic_clear_rel_8 529 #define atomic_cmpset_rel_char atomic_cmpset_rel_8 530 #define atomic_set_rel_char atomic_set_rel_8 531 #define atomic_subtract_rel_char atomic_subtract_rel_8 532 #define atomic_store_rel_char atomic_store_rel_8 533 534 #define atomic_add_short atomic_add_16 535 #define atomic_fcmpset_short atomic_fcmpset_16 536 #define atomic_clear_short atomic_clear_16 537 #define atomic_cmpset_short atomic_cmpset_16 538 #define atomic_fetchadd_short atomic_fetchadd_16 539 #define atomic_readandclear_short atomic_readandclear_16 540 #define atomic_set_short atomic_set_16 541 #define atomic_swap_short atomic_swap_16 542 #define atomic_subtract_short atomic_subtract_16 543 #define atomic_testandclear_short atomic_testandclear_16 544 #define atomic_testandset_short atomic_testandset_16 545 546 #define atomic_add_acq_short atomic_add_acq_16 547 #define atomic_fcmpset_acq_short atomic_fcmpset_acq_16 548 #define atomic_clear_acq_short atomic_clear_acq_16 549 #define atomic_cmpset_acq_short atomic_cmpset_acq_16 550 #define atomic_load_acq_short atomic_load_acq_16 551 #define atomic_set_acq_short atomic_set_acq_16 552 #define atomic_subtract_acq_short atomic_subtract_acq_16 553 #define atomic_testandset_acq_short atomic_testandset_acq_16 554 555 #define atomic_add_rel_short atomic_add_rel_16 556 #define atomic_fcmpset_rel_short atomic_fcmpset_rel_16 557 #define atomic_clear_rel_short atomic_clear_rel_16 558 #define atomic_cmpset_rel_short atomic_cmpset_rel_16 559 #define atomic_set_rel_short atomic_set_rel_16 560 #define atomic_subtract_rel_short atomic_subtract_rel_16 561 #define atomic_store_rel_short atomic_store_rel_16 562 563 #define atomic_add_int atomic_add_32 564 #define atomic_fcmpset_int atomic_fcmpset_32 565 #define atomic_clear_int atomic_clear_32 566 #define atomic_cmpset_int atomic_cmpset_32 567 #define atomic_fetchadd_int atomic_fetchadd_32 568 #define atomic_readandclear_int atomic_readandclear_32 569 #define atomic_set_int atomic_set_32 570 #define atomic_swap_int atomic_swap_32 571 #define atomic_subtract_int atomic_subtract_32 572 #define atomic_testandclear_int atomic_testandclear_32 573 #define atomic_testandset_int atomic_testandset_32 574 575 #define atomic_add_acq_int atomic_add_acq_32 576 #define atomic_fcmpset_acq_int atomic_fcmpset_acq_32 577 #define atomic_clear_acq_int atomic_clear_acq_32 578 #define atomic_cmpset_acq_int atomic_cmpset_acq_32 579 #define atomic_load_acq_int atomic_load_acq_32 580 #define atomic_set_acq_int atomic_set_acq_32 581 #define atomic_subtract_acq_int atomic_subtract_acq_32 582 #define atomic_testandset_acq_int atomic_testandset_acq_32 583 584 #define atomic_add_rel_int atomic_add_rel_32 585 #define atomic_fcmpset_rel_int atomic_fcmpset_rel_32 586 #define atomic_clear_rel_int atomic_clear_rel_32 587 #define atomic_cmpset_rel_int atomic_cmpset_rel_32 588 #define atomic_set_rel_int atomic_set_rel_32 589 #define atomic_subtract_rel_int atomic_subtract_rel_32 590 #define atomic_store_rel_int atomic_store_rel_32 591 592 #define atomic_add_long atomic_add_64 593 #define atomic_fcmpset_long atomic_fcmpset_64 594 #define atomic_clear_long atomic_clear_64 595 #define atomic_cmpset_long atomic_cmpset_64 596 #define atomic_fetchadd_long atomic_fetchadd_64 597 #define atomic_readandclear_long atomic_readandclear_64 598 #define atomic_set_long atomic_set_64 599 #define atomic_swap_long atomic_swap_64 600 #define atomic_subtract_long atomic_subtract_64 601 #define atomic_testandclear_long atomic_testandclear_64 602 #define atomic_testandset_long atomic_testandset_64 603 604 #define atomic_add_ptr atomic_add_64 605 #define atomic_fcmpset_ptr atomic_fcmpset_64 606 #define atomic_clear_ptr atomic_clear_64 607 #define atomic_cmpset_ptr atomic_cmpset_64 608 #define atomic_fetchadd_ptr atomic_fetchadd_64 609 #define atomic_readandclear_ptr atomic_readandclear_64 610 #define atomic_set_ptr atomic_set_64 611 #define atomic_swap_ptr atomic_swap_64 612 #define atomic_subtract_ptr atomic_subtract_64 613 614 #define atomic_add_acq_long atomic_add_acq_64 615 #define atomic_fcmpset_acq_long atomic_fcmpset_acq_64 616 #define atomic_clear_acq_long atomic_clear_acq_64 617 #define atomic_cmpset_acq_long atomic_cmpset_acq_64 618 #define atomic_load_acq_long atomic_load_acq_64 619 #define atomic_set_acq_long atomic_set_acq_64 620 #define atomic_subtract_acq_long atomic_subtract_acq_64 621 #define atomic_testandset_acq_long atomic_testandset_acq_64 622 623 #define atomic_add_acq_ptr atomic_add_acq_64 624 #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_64 625 #define atomic_clear_acq_ptr atomic_clear_acq_64 626 #define atomic_cmpset_acq_ptr atomic_cmpset_acq_64 627 #define atomic_load_acq_ptr atomic_load_acq_64 628 #define atomic_set_acq_ptr atomic_set_acq_64 629 #define atomic_subtract_acq_ptr atomic_subtract_acq_64 630 631 #define atomic_add_rel_long atomic_add_rel_64 632 #define atomic_fcmpset_rel_long atomic_fcmpset_rel_64 633 #define atomic_clear_rel_long atomic_clear_rel_64 634 #define atomic_cmpset_rel_long atomic_cmpset_rel_64 635 #define atomic_set_rel_long atomic_set_rel_64 636 #define atomic_subtract_rel_long atomic_subtract_rel_64 637 #define atomic_store_rel_long atomic_store_rel_64 638 639 #define atomic_add_rel_ptr atomic_add_rel_64 640 #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_64 641 #define atomic_clear_rel_ptr atomic_clear_rel_64 642 #define atomic_cmpset_rel_ptr atomic_cmpset_rel_64 643 #define atomic_set_rel_ptr atomic_set_rel_64 644 #define atomic_subtract_rel_ptr atomic_subtract_rel_64 645 #define atomic_store_rel_ptr atomic_store_rel_64 646 647 static __inline void 648 atomic_thread_fence_acq(void) 649 { 650 651 dmb(ld); 652 } 653 654 static __inline void 655 atomic_thread_fence_rel(void) 656 { 657 658 dmb(sy); 659 } 660 661 static __inline void 662 atomic_thread_fence_acq_rel(void) 663 { 664 665 dmb(sy); 666 } 667 668 static __inline void 669 atomic_thread_fence_seq_cst(void) 670 { 671 672 dmb(sy); 673 } 674 675 #endif /* KCSAN && !KCSAN_RUNTIME */ 676 #endif /* _MACHINE_ATOMIC_H_ */ 677 678 #endif /* !__arm__ */ 679