1 /*- 2 * Copyright (c) 2013 Andrew Turner <andrew@freebsd.org> 3 * All rights reserved. 4 * 5 * Redistribution and use in source and binary forms, with or without 6 * modification, are permitted provided that the following conditions 7 * are met: 8 * 1. Redistributions of source code must retain the above copyright 9 * notice, this list of conditions and the following disclaimer. 10 * 2. Redistributions in binary form must reproduce the above copyright 11 * notice, this list of conditions and the following disclaimer in the 12 * documentation and/or other materials provided with the distribution. 13 * 14 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 15 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 17 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 18 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 24 * SUCH DAMAGE. 25 * 26 * $FreeBSD$ 27 */ 28 29 #ifndef _MACHINE_ATOMIC_H_ 30 #define _MACHINE_ATOMIC_H_ 31 32 #define isb() __asm __volatile("isb" : : : "memory") 33 34 /* 35 * Options for DMB and DSB: 36 * oshld Outer Shareable, load 37 * oshst Outer Shareable, store 38 * osh Outer Shareable, all 39 * nshld Non-shareable, load 40 * nshst Non-shareable, store 41 * nsh Non-shareable, all 42 * ishld Inner Shareable, load 43 * ishst Inner Shareable, store 44 * ish Inner Shareable, all 45 * ld Full system, load 46 * st Full system, store 47 * sy Full system, all 48 */ 49 #define dsb(opt) __asm __volatile("dsb " __STRING(opt) : : : "memory") 50 #define dmb(opt) __asm __volatile("dmb " __STRING(opt) : : : "memory") 51 52 #define mb() dmb(sy) /* Full system memory barrier all */ 53 #define wmb() dmb(st) /* Full system memory barrier store */ 54 #define rmb() dmb(ld) /* Full system memory barrier load */ 55 56 #ifdef _KERNEL 57 extern _Bool lse_supported; 58 #endif 59 60 #if defined(SAN_NEEDS_INTERCEPTORS) && !defined(SAN_RUNTIME) 61 #include <sys/atomic_san.h> 62 #else 63 64 #include <sys/atomic_common.h> 65 66 #ifdef _KERNEL 67 68 #ifdef LSE_ATOMICS 69 #define _ATOMIC_LSE_SUPPORTED 1 70 #else 71 #define _ATOMIC_LSE_SUPPORTED lse_supported 72 #endif 73 #else 74 #define _ATOMIC_LSE_SUPPORTED 0 75 #endif 76 77 #define _ATOMIC_OP_PROTO(t, op, bar, flav) \ 78 static __inline void \ 79 atomic_##op##_##bar##t##flav(volatile uint##t##_t *p, uint##t##_t val) 80 81 #define _ATOMIC_OP_IMPL(t, w, s, op, llsc_asm_op, lse_asm_op, pre, bar, a, l) \ 82 _ATOMIC_OP_PROTO(t, op, bar, _llsc) \ 83 { \ 84 uint##t##_t tmp; \ 85 int res; \ 86 \ 87 pre; \ 88 __asm __volatile( \ 89 "1: ld"#a"xr"#s" %"#w"0, [%2]\n" \ 90 " "#llsc_asm_op" %"#w"0, %"#w"0, %"#w"3\n" \ 91 " st"#l"xr"#s" %w1, %"#w"0, [%2]\n" \ 92 " cbnz %w1, 1b\n" \ 93 : "=&r"(tmp), "=&r"(res) \ 94 : "r" (p), "r" (val) \ 95 : "memory" \ 96 ); \ 97 } \ 98 \ 99 _ATOMIC_OP_PROTO(t, op, bar, _lse) \ 100 { \ 101 uint##t##_t tmp; \ 102 \ 103 pre; \ 104 __asm __volatile( \ 105 ".arch_extension lse\n" \ 106 "ld"#lse_asm_op#a#l#s" %"#w"2, %"#w"0, [%1]\n" \ 107 ".arch_extension nolse\n" \ 108 : "=r" (tmp) \ 109 : "r" (p), "r" (val) \ 110 : "memory" \ 111 ); \ 112 } \ 113 \ 114 _ATOMIC_OP_PROTO(t, op, bar, ) \ 115 { \ 116 if (_ATOMIC_LSE_SUPPORTED) \ 117 atomic_##op##_##bar##t##_lse(p, val); \ 118 else \ 119 atomic_##op##_##bar##t##_llsc(p, val); \ 120 } 121 122 #define __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, bar, a, l) \ 123 _ATOMIC_OP_IMPL(8, w, b, op, llsc_asm_op, lse_asm_op, pre, \ 124 bar, a, l) \ 125 _ATOMIC_OP_IMPL(16, w, h, op, llsc_asm_op, lse_asm_op, pre, \ 126 bar, a, l) \ 127 _ATOMIC_OP_IMPL(32, w, , op, llsc_asm_op, lse_asm_op, pre, \ 128 bar, a, l) \ 129 _ATOMIC_OP_IMPL(64, , , op, llsc_asm_op, lse_asm_op, pre, \ 130 bar, a, l) 131 132 #define _ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre) \ 133 __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, , , ) \ 134 __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, acq_, a, ) \ 135 __ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, rel_, , l) 136 137 _ATOMIC_OP(add, add, add, ) 138 _ATOMIC_OP(clear, bic, clr, ) 139 _ATOMIC_OP(set, orr, set, ) 140 _ATOMIC_OP(subtract, add, add, val = -val) 141 142 #define _ATOMIC_CMPSET_PROTO(t, bar, flav) \ 143 static __inline int \ 144 atomic_cmpset_##bar##t##flav(volatile uint##t##_t *p, \ 145 uint##t##_t cmpval, uint##t##_t newval) 146 147 #define _ATOMIC_FCMPSET_PROTO(t, bar, flav) \ 148 static __inline int \ 149 atomic_fcmpset_##bar##t##flav(volatile uint##t##_t *p, \ 150 uint##t##_t *cmpval, uint##t##_t newval) 151 152 #define _ATOMIC_CMPSET_IMPL(t, w, s, bar, a, l) \ 153 _ATOMIC_CMPSET_PROTO(t, bar, _llsc) \ 154 { \ 155 uint##t##_t tmp; \ 156 int res; \ 157 \ 158 __asm __volatile( \ 159 "1: mov %w1, #1\n" \ 160 " ld"#a"xr"#s" %"#w"0, [%2]\n" \ 161 " cmp %"#w"0, %"#w"3\n" \ 162 " b.ne 2f\n" \ 163 " st"#l"xr"#s" %w1, %"#w"4, [%2]\n" \ 164 " cbnz %w1, 1b\n" \ 165 "2:" \ 166 : "=&r"(tmp), "=&r"(res) \ 167 : "r" (p), "r" (cmpval), "r" (newval) \ 168 : "cc", "memory" \ 169 ); \ 170 \ 171 return (!res); \ 172 } \ 173 \ 174 _ATOMIC_CMPSET_PROTO(t, bar, _lse) \ 175 { \ 176 uint##t##_t oldval; \ 177 int res; \ 178 \ 179 oldval = cmpval; \ 180 __asm __volatile( \ 181 ".arch_extension lse\n" \ 182 "cas"#a#l#s" %"#w"1, %"#w"4, [%3]\n" \ 183 "cmp %"#w"1, %"#w"2\n" \ 184 "cset %w0, eq\n" \ 185 ".arch_extension nolse\n" \ 186 : "=r" (res), "+&r" (cmpval) \ 187 : "r" (oldval), "r" (p), "r" (newval) \ 188 : "cc", "memory" \ 189 ); \ 190 \ 191 return (res); \ 192 } \ 193 \ 194 _ATOMIC_CMPSET_PROTO(t, bar, ) \ 195 { \ 196 if (_ATOMIC_LSE_SUPPORTED) \ 197 return (atomic_cmpset_##bar##t##_lse(p, cmpval, \ 198 newval)); \ 199 else \ 200 return (atomic_cmpset_##bar##t##_llsc(p, cmpval, \ 201 newval)); \ 202 } \ 203 \ 204 _ATOMIC_FCMPSET_PROTO(t, bar, _llsc) \ 205 { \ 206 uint##t##_t _cmpval, tmp; \ 207 int res; \ 208 \ 209 _cmpval = *cmpval; \ 210 __asm __volatile( \ 211 " mov %w1, #1\n" \ 212 " ld"#a"xr"#s" %"#w"0, [%2]\n" \ 213 " cmp %"#w"0, %"#w"3\n" \ 214 " b.ne 1f\n" \ 215 " st"#l"xr"#s" %w1, %"#w"4, [%2]\n" \ 216 "1:" \ 217 : "=&r"(tmp), "=&r"(res) \ 218 : "r" (p), "r" (_cmpval), "r" (newval) \ 219 : "cc", "memory" \ 220 ); \ 221 *cmpval = tmp; \ 222 \ 223 return (!res); \ 224 } \ 225 \ 226 _ATOMIC_FCMPSET_PROTO(t, bar, _lse) \ 227 { \ 228 uint##t##_t _cmpval, tmp; \ 229 int res; \ 230 \ 231 _cmpval = tmp = *cmpval; \ 232 __asm __volatile( \ 233 ".arch_extension lse\n" \ 234 "cas"#a#l#s" %"#w"1, %"#w"4, [%3]\n" \ 235 "cmp %"#w"1, %"#w"2\n" \ 236 "cset %w0, eq\n" \ 237 ".arch_extension nolse\n" \ 238 : "=r" (res), "+&r" (tmp) \ 239 : "r" (_cmpval), "r" (p), "r" (newval) \ 240 : "cc", "memory" \ 241 ); \ 242 *cmpval = tmp; \ 243 \ 244 return (res); \ 245 } \ 246 \ 247 _ATOMIC_FCMPSET_PROTO(t, bar, ) \ 248 { \ 249 if (_ATOMIC_LSE_SUPPORTED) \ 250 return (atomic_fcmpset_##bar##t##_lse(p, cmpval, \ 251 newval)); \ 252 else \ 253 return (atomic_fcmpset_##bar##t##_llsc(p, cmpval, \ 254 newval)); \ 255 } 256 257 #define _ATOMIC_CMPSET(bar, a, l) \ 258 _ATOMIC_CMPSET_IMPL(8, w, b, bar, a, l) \ 259 _ATOMIC_CMPSET_IMPL(16, w, h, bar, a, l) \ 260 _ATOMIC_CMPSET_IMPL(32, w, , bar, a, l) \ 261 _ATOMIC_CMPSET_IMPL(64, , , bar, a, l) 262 263 #define atomic_cmpset_8 atomic_cmpset_8 264 #define atomic_fcmpset_8 atomic_fcmpset_8 265 #define atomic_cmpset_16 atomic_cmpset_16 266 #define atomic_fcmpset_16 atomic_fcmpset_16 267 268 _ATOMIC_CMPSET( , , ) 269 _ATOMIC_CMPSET(acq_, a, ) 270 _ATOMIC_CMPSET(rel_, ,l) 271 272 #define _ATOMIC_FETCHADD_PROTO(t, flav) \ 273 static __inline uint##t##_t \ 274 atomic_fetchadd_##t##flav(volatile uint##t##_t *p, uint##t##_t val) 275 276 #define _ATOMIC_FETCHADD_IMPL(t, w) \ 277 _ATOMIC_FETCHADD_PROTO(t, _llsc) \ 278 { \ 279 uint##t##_t ret, tmp; \ 280 int res; \ 281 \ 282 __asm __volatile( \ 283 "1: ldxr %"#w"2, [%3]\n" \ 284 " add %"#w"0, %"#w"2, %"#w"4\n" \ 285 " stxr %w1, %"#w"0, [%3]\n" \ 286 " cbnz %w1, 1b\n" \ 287 : "=&r" (tmp), "=&r" (res), "=&r" (ret) \ 288 : "r" (p), "r" (val) \ 289 : "memory" \ 290 ); \ 291 \ 292 return (ret); \ 293 } \ 294 \ 295 _ATOMIC_FETCHADD_PROTO(t, _lse) \ 296 { \ 297 uint##t##_t ret; \ 298 \ 299 __asm __volatile( \ 300 ".arch_extension lse\n" \ 301 "ldadd %"#w"2, %"#w"0, [%1]\n" \ 302 ".arch_extension nolse\n" \ 303 : "=r" (ret) \ 304 : "r" (p), "r" (val) \ 305 : "memory" \ 306 ); \ 307 \ 308 return (ret); \ 309 } \ 310 \ 311 _ATOMIC_FETCHADD_PROTO(t, ) \ 312 { \ 313 if (_ATOMIC_LSE_SUPPORTED) \ 314 return (atomic_fetchadd_##t##_lse(p, val)); \ 315 else \ 316 return (atomic_fetchadd_##t##_llsc(p, val)); \ 317 } 318 319 _ATOMIC_FETCHADD_IMPL(32, w) 320 _ATOMIC_FETCHADD_IMPL(64, ) 321 322 #define _ATOMIC_SWAP_PROTO(t, flav) \ 323 static __inline uint##t##_t \ 324 atomic_swap_##t##flav(volatile uint##t##_t *p, uint##t##_t val) 325 326 #define _ATOMIC_READANDCLEAR_PROTO(t, flav) \ 327 static __inline uint##t##_t \ 328 atomic_readandclear_##t##flav(volatile uint##t##_t *p) 329 330 #define _ATOMIC_SWAP_IMPL(t, w, zreg) \ 331 _ATOMIC_SWAP_PROTO(t, _llsc) \ 332 { \ 333 uint##t##_t ret; \ 334 int res; \ 335 \ 336 __asm __volatile( \ 337 "1: ldxr %"#w"1, [%2]\n" \ 338 " stxr %w0, %"#w"3, [%2]\n" \ 339 " cbnz %w0, 1b\n" \ 340 : "=&r" (res), "=&r" (ret) \ 341 : "r" (p), "r" (val) \ 342 : "memory" \ 343 ); \ 344 \ 345 return (ret); \ 346 } \ 347 \ 348 _ATOMIC_SWAP_PROTO(t, _lse) \ 349 { \ 350 uint##t##_t ret; \ 351 \ 352 __asm __volatile( \ 353 ".arch_extension lse\n" \ 354 "swp %"#w"2, %"#w"0, [%1]\n" \ 355 ".arch_extension nolse\n" \ 356 : "=r" (ret) \ 357 : "r" (p), "r" (val) \ 358 : "memory" \ 359 ); \ 360 \ 361 return (ret); \ 362 } \ 363 \ 364 _ATOMIC_SWAP_PROTO(t, ) \ 365 { \ 366 if (_ATOMIC_LSE_SUPPORTED) \ 367 return (atomic_swap_##t##_lse(p, val)); \ 368 else \ 369 return (atomic_swap_##t##_llsc(p, val)); \ 370 } \ 371 \ 372 _ATOMIC_READANDCLEAR_PROTO(t, _llsc) \ 373 { \ 374 uint##t##_t ret; \ 375 int res; \ 376 \ 377 __asm __volatile( \ 378 "1: ldxr %"#w"1, [%2]\n" \ 379 " stxr %w0, "#zreg", [%2]\n" \ 380 " cbnz %w0, 1b\n" \ 381 : "=&r" (res), "=&r" (ret) \ 382 : "r" (p) \ 383 : "memory" \ 384 ); \ 385 \ 386 return (ret); \ 387 } \ 388 \ 389 _ATOMIC_READANDCLEAR_PROTO(t, _lse) \ 390 { \ 391 return (atomic_swap_##t##_lse(p, 0)); \ 392 } \ 393 \ 394 _ATOMIC_READANDCLEAR_PROTO(t, ) \ 395 { \ 396 if (_ATOMIC_LSE_SUPPORTED) \ 397 return (atomic_readandclear_##t##_lse(p)); \ 398 else \ 399 return (atomic_readandclear_##t##_llsc(p)); \ 400 } 401 402 _ATOMIC_SWAP_IMPL(32, w, wzr) 403 _ATOMIC_SWAP_IMPL(64, , xzr) 404 405 #define _ATOMIC_TEST_OP_PROTO(t, op, bar, flav) \ 406 static __inline int \ 407 atomic_testand##op##_##bar##t##flav(volatile uint##t##_t *p, u_int val) 408 409 #define _ATOMIC_TEST_OP_IMPL(t, w, op, llsc_asm_op, lse_asm_op, bar, a) \ 410 _ATOMIC_TEST_OP_PROTO(t, op, bar, _llsc) \ 411 { \ 412 uint##t##_t mask, old, tmp; \ 413 int res; \ 414 \ 415 mask = ((uint##t##_t)1) << (val & (t - 1)); \ 416 __asm __volatile( \ 417 "1: ld"#a"xr %"#w"2, [%3]\n" \ 418 " "#llsc_asm_op" %"#w"0, %"#w"2, %"#w"4\n" \ 419 " stxr %w1, %"#w"0, [%3]\n" \ 420 " cbnz %w1, 1b\n" \ 421 : "=&r" (tmp), "=&r" (res), "=&r" (old) \ 422 : "r" (p), "r" (mask) \ 423 : "memory" \ 424 ); \ 425 \ 426 return ((old & mask) != 0); \ 427 } \ 428 \ 429 _ATOMIC_TEST_OP_PROTO(t, op, bar, _lse) \ 430 { \ 431 uint##t##_t mask, old; \ 432 \ 433 mask = ((uint##t##_t)1) << (val & (t - 1)); \ 434 __asm __volatile( \ 435 ".arch_extension lse\n" \ 436 "ld"#lse_asm_op#a" %"#w"2, %"#w"0, [%1]\n" \ 437 ".arch_extension nolse\n" \ 438 : "=r" (old) \ 439 : "r" (p), "r" (mask) \ 440 : "memory" \ 441 ); \ 442 \ 443 return ((old & mask) != 0); \ 444 } \ 445 \ 446 _ATOMIC_TEST_OP_PROTO(t, op, bar, ) \ 447 { \ 448 if (_ATOMIC_LSE_SUPPORTED) \ 449 return (atomic_testand##op##_##bar##t##_lse(p, val)); \ 450 else \ 451 return (atomic_testand##op##_##bar##t##_llsc(p, val)); \ 452 } 453 454 #define _ATOMIC_TEST_OP(op, llsc_asm_op, lse_asm_op) \ 455 _ATOMIC_TEST_OP_IMPL(32, w, op, llsc_asm_op, lse_asm_op, , ) \ 456 _ATOMIC_TEST_OP_IMPL(32, w, op, llsc_asm_op, lse_asm_op, acq_, a) \ 457 _ATOMIC_TEST_OP_IMPL(64, , op, llsc_asm_op, lse_asm_op, , ) \ 458 _ATOMIC_TEST_OP_IMPL(64, , op, llsc_asm_op, lse_asm_op, acq_, a) 459 460 _ATOMIC_TEST_OP(clear, bic, clr) 461 _ATOMIC_TEST_OP(set, orr, set) 462 463 #define _ATOMIC_LOAD_ACQ_IMPL(t, w, s) \ 464 static __inline uint##t##_t \ 465 atomic_load_acq_##t(volatile uint##t##_t *p) \ 466 { \ 467 uint##t##_t ret; \ 468 \ 469 __asm __volatile( \ 470 "ldar"#s" %"#w"0, [%1]\n" \ 471 : "=&r" (ret) \ 472 : "r" (p) \ 473 : "memory"); \ 474 \ 475 return (ret); \ 476 } 477 478 #define atomic_load_acq_8 atomic_load_acq_8 479 #define atomic_load_acq_16 atomic_load_acq_16 480 _ATOMIC_LOAD_ACQ_IMPL(8, w, b) 481 _ATOMIC_LOAD_ACQ_IMPL(16, w, h) 482 _ATOMIC_LOAD_ACQ_IMPL(32, w, ) 483 _ATOMIC_LOAD_ACQ_IMPL(64, , ) 484 485 #define _ATOMIC_STORE_REL_IMPL(t, w, s) \ 486 static __inline void \ 487 atomic_store_rel_##t(volatile uint##t##_t *p, uint##t##_t val) \ 488 { \ 489 __asm __volatile( \ 490 "stlr"#s" %"#w"0, [%1]\n" \ 491 : \ 492 : "r" (val), "r" (p) \ 493 : "memory"); \ 494 } 495 496 _ATOMIC_STORE_REL_IMPL(8, w, b) 497 _ATOMIC_STORE_REL_IMPL(16, w, h) 498 _ATOMIC_STORE_REL_IMPL(32, w, ) 499 _ATOMIC_STORE_REL_IMPL(64, , ) 500 501 #define atomic_add_char atomic_add_8 502 #define atomic_fcmpset_char atomic_fcmpset_8 503 #define atomic_clear_char atomic_clear_8 504 #define atomic_cmpset_char atomic_cmpset_8 505 #define atomic_fetchadd_char atomic_fetchadd_8 506 #define atomic_readandclear_char atomic_readandclear_8 507 #define atomic_set_char atomic_set_8 508 #define atomic_swap_char atomic_swap_8 509 #define atomic_subtract_char atomic_subtract_8 510 #define atomic_testandclear_char atomic_testandclear_8 511 #define atomic_testandset_char atomic_testandset_8 512 513 #define atomic_add_acq_char atomic_add_acq_8 514 #define atomic_fcmpset_acq_char atomic_fcmpset_acq_8 515 #define atomic_clear_acq_char atomic_clear_acq_8 516 #define atomic_cmpset_acq_char atomic_cmpset_acq_8 517 #define atomic_load_acq_char atomic_load_acq_8 518 #define atomic_set_acq_char atomic_set_acq_8 519 #define atomic_subtract_acq_char atomic_subtract_acq_8 520 #define atomic_testandset_acq_char atomic_testandset_acq_8 521 522 #define atomic_add_rel_char atomic_add_rel_8 523 #define atomic_fcmpset_rel_char atomic_fcmpset_rel_8 524 #define atomic_clear_rel_char atomic_clear_rel_8 525 #define atomic_cmpset_rel_char atomic_cmpset_rel_8 526 #define atomic_set_rel_char atomic_set_rel_8 527 #define atomic_subtract_rel_char atomic_subtract_rel_8 528 #define atomic_store_rel_char atomic_store_rel_8 529 530 #define atomic_add_short atomic_add_16 531 #define atomic_fcmpset_short atomic_fcmpset_16 532 #define atomic_clear_short atomic_clear_16 533 #define atomic_cmpset_short atomic_cmpset_16 534 #define atomic_fetchadd_short atomic_fetchadd_16 535 #define atomic_readandclear_short atomic_readandclear_16 536 #define atomic_set_short atomic_set_16 537 #define atomic_swap_short atomic_swap_16 538 #define atomic_subtract_short atomic_subtract_16 539 #define atomic_testandclear_short atomic_testandclear_16 540 #define atomic_testandset_short atomic_testandset_16 541 542 #define atomic_add_acq_short atomic_add_acq_16 543 #define atomic_fcmpset_acq_short atomic_fcmpset_acq_16 544 #define atomic_clear_acq_short atomic_clear_acq_16 545 #define atomic_cmpset_acq_short atomic_cmpset_acq_16 546 #define atomic_load_acq_short atomic_load_acq_16 547 #define atomic_set_acq_short atomic_set_acq_16 548 #define atomic_subtract_acq_short atomic_subtract_acq_16 549 #define atomic_testandset_acq_short atomic_testandset_acq_16 550 551 #define atomic_add_rel_short atomic_add_rel_16 552 #define atomic_fcmpset_rel_short atomic_fcmpset_rel_16 553 #define atomic_clear_rel_short atomic_clear_rel_16 554 #define atomic_cmpset_rel_short atomic_cmpset_rel_16 555 #define atomic_set_rel_short atomic_set_rel_16 556 #define atomic_subtract_rel_short atomic_subtract_rel_16 557 #define atomic_store_rel_short atomic_store_rel_16 558 559 #define atomic_add_int atomic_add_32 560 #define atomic_fcmpset_int atomic_fcmpset_32 561 #define atomic_clear_int atomic_clear_32 562 #define atomic_cmpset_int atomic_cmpset_32 563 #define atomic_fetchadd_int atomic_fetchadd_32 564 #define atomic_readandclear_int atomic_readandclear_32 565 #define atomic_set_int atomic_set_32 566 #define atomic_swap_int atomic_swap_32 567 #define atomic_subtract_int atomic_subtract_32 568 #define atomic_testandclear_int atomic_testandclear_32 569 #define atomic_testandset_int atomic_testandset_32 570 571 #define atomic_add_acq_int atomic_add_acq_32 572 #define atomic_fcmpset_acq_int atomic_fcmpset_acq_32 573 #define atomic_clear_acq_int atomic_clear_acq_32 574 #define atomic_cmpset_acq_int atomic_cmpset_acq_32 575 #define atomic_load_acq_int atomic_load_acq_32 576 #define atomic_set_acq_int atomic_set_acq_32 577 #define atomic_subtract_acq_int atomic_subtract_acq_32 578 #define atomic_testandset_acq_int atomic_testandset_acq_32 579 580 #define atomic_add_rel_int atomic_add_rel_32 581 #define atomic_fcmpset_rel_int atomic_fcmpset_rel_32 582 #define atomic_clear_rel_int atomic_clear_rel_32 583 #define atomic_cmpset_rel_int atomic_cmpset_rel_32 584 #define atomic_set_rel_int atomic_set_rel_32 585 #define atomic_subtract_rel_int atomic_subtract_rel_32 586 #define atomic_store_rel_int atomic_store_rel_32 587 588 #define atomic_add_long atomic_add_64 589 #define atomic_fcmpset_long atomic_fcmpset_64 590 #define atomic_clear_long atomic_clear_64 591 #define atomic_cmpset_long atomic_cmpset_64 592 #define atomic_fetchadd_long atomic_fetchadd_64 593 #define atomic_readandclear_long atomic_readandclear_64 594 #define atomic_set_long atomic_set_64 595 #define atomic_swap_long atomic_swap_64 596 #define atomic_subtract_long atomic_subtract_64 597 #define atomic_testandclear_long atomic_testandclear_64 598 #define atomic_testandset_long atomic_testandset_64 599 600 #define atomic_add_ptr atomic_add_64 601 #define atomic_fcmpset_ptr atomic_fcmpset_64 602 #define atomic_clear_ptr atomic_clear_64 603 #define atomic_cmpset_ptr atomic_cmpset_64 604 #define atomic_fetchadd_ptr atomic_fetchadd_64 605 #define atomic_readandclear_ptr atomic_readandclear_64 606 #define atomic_set_ptr atomic_set_64 607 #define atomic_swap_ptr atomic_swap_64 608 #define atomic_subtract_ptr atomic_subtract_64 609 610 #define atomic_add_acq_long atomic_add_acq_64 611 #define atomic_fcmpset_acq_long atomic_fcmpset_acq_64 612 #define atomic_clear_acq_long atomic_clear_acq_64 613 #define atomic_cmpset_acq_long atomic_cmpset_acq_64 614 #define atomic_load_acq_long atomic_load_acq_64 615 #define atomic_set_acq_long atomic_set_acq_64 616 #define atomic_subtract_acq_long atomic_subtract_acq_64 617 #define atomic_testandset_acq_long atomic_testandset_acq_64 618 619 #define atomic_add_acq_ptr atomic_add_acq_64 620 #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_64 621 #define atomic_clear_acq_ptr atomic_clear_acq_64 622 #define atomic_cmpset_acq_ptr atomic_cmpset_acq_64 623 #define atomic_load_acq_ptr atomic_load_acq_64 624 #define atomic_set_acq_ptr atomic_set_acq_64 625 #define atomic_subtract_acq_ptr atomic_subtract_acq_64 626 627 #define atomic_add_rel_long atomic_add_rel_64 628 #define atomic_fcmpset_rel_long atomic_fcmpset_rel_64 629 #define atomic_clear_rel_long atomic_clear_rel_64 630 #define atomic_cmpset_rel_long atomic_cmpset_rel_64 631 #define atomic_set_rel_long atomic_set_rel_64 632 #define atomic_subtract_rel_long atomic_subtract_rel_64 633 #define atomic_store_rel_long atomic_store_rel_64 634 635 #define atomic_add_rel_ptr atomic_add_rel_64 636 #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_64 637 #define atomic_clear_rel_ptr atomic_clear_rel_64 638 #define atomic_cmpset_rel_ptr atomic_cmpset_rel_64 639 #define atomic_set_rel_ptr atomic_set_rel_64 640 #define atomic_subtract_rel_ptr atomic_subtract_rel_64 641 #define atomic_store_rel_ptr atomic_store_rel_64 642 643 static __inline void 644 atomic_thread_fence_acq(void) 645 { 646 647 dmb(ld); 648 } 649 650 static __inline void 651 atomic_thread_fence_rel(void) 652 { 653 654 dmb(sy); 655 } 656 657 static __inline void 658 atomic_thread_fence_acq_rel(void) 659 { 660 661 dmb(sy); 662 } 663 664 static __inline void 665 atomic_thread_fence_seq_cst(void) 666 { 667 668 dmb(sy); 669 } 670 671 #endif /* KCSAN && !KCSAN_RUNTIME */ 672 #endif /* _MACHINE_ATOMIC_H_ */ 673