/*-
 * Copyright (c) 2008 Marcel Moolenaar
 * Copyright (c) 2001 Benno Rice
 * Copyright (c) 2001 David E. O'Brien
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * The __ATOMIC_REL/ACQ() macros provide memory barriers only in conjunction
 * with the atomic lXarx/stXcx. sequences below. They are not exposed outside
 * of this file. See also Appendix B.2 of Book II of the architecture manual.
 *
 * Note that not all Book-E processors accept the light-weight sync variant.
 * In particular, early models of E500 cores are known to wedge. Bank on all
 * 64-bit capable CPUs to accept lwsync properly and pessimize 32-bit CPUs
 * to use the heavier-weight sync.
 */

#ifdef __powerpc64__
#define	mb()		__asm __volatile("sync" : : : "memory")
#define	rmb()		__asm __volatile("lwsync" : : : "memory")
#define	wmb()		__asm __volatile("lwsync" : : : "memory")
#define	__ATOMIC_REL()	__asm __volatile("lwsync" : : : "memory")
#define	__ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#else
#define	mb()		__asm __volatile("sync" : : : "memory")
#define	rmb()		__asm __volatile("sync" : : : "memory")
#define	wmb()		__asm __volatile("sync" : : : "memory")
#define	__ATOMIC_REL()	__asm __volatile("sync" : : : "memory")
#define	__ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
#endif

static __inline void
powerpc_lwsync(void)
{

#ifdef __powerpc64__
	__asm __volatile("lwsync" : : : "memory");
#else
	__asm __volatile("sync" : : : "memory");
#endif
}

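/*
 * For illustration only (not part of the API): an acquire variant below is
 * the plain load-reserve/store-conditional loop followed by __ATOMIC_ACQ(),
 * and a release variant is __ATOMIC_REL() followed by the loop.  For
 * example, atomic_add_acq_int(p, v) is roughly:
 *
 *	1: lwarx %0, 0, p	# load *p and take a reservation
 *	   add %0, v, %0	# %0 = *p + v
 *	   stwcx. %0, 0, p	# store iff the reservation still holds
 *	   bne- 1b		# otherwise retry
 *	   isync		# acquire barrier (__ATOMIC_ACQ)
 */
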
/*
 * atomic_add(p, v)
 * { *p += v; }
 */

#define	__atomic_add_int(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" add %0, %3, %0\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_int */

#ifdef __powerpc64__
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1: ldarx %0, 0, %2\n"					\
	" add %0, %3, %0\n"					\
	" stdcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#else
#define	__atomic_add_long(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" add %0, %3, %0\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_add_long */
#endif

#define	_ATOMIC_ADD(type)					\
    static __inline void					\
    atomic_add_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_add_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_add_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_add_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_add_##type(p, v, t);				\
    }								\
    /* _ATOMIC_ADD */

_ATOMIC_ADD(int)
_ATOMIC_ADD(long)

#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int

#ifdef __powerpc64__
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long

#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#else
#define	atomic_add_ptr		atomic_add_int
#define	atomic_add_acq_ptr	atomic_add_acq_int
#define	atomic_add_rel_ptr	atomic_add_rel_int
#endif
#undef _ATOMIC_ADD
#undef __atomic_add_long
#undef __atomic_add_int

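/*
 * Usage sketch (illustrative only; 'sc_count' is a hypothetical counter):
 *
 *	static volatile u_int sc_count;
 *
 *	atomic_add_int(&sc_count, 1);		no ordering implied
 *	atomic_add_rel_int(&sc_count, 1);	prior stores become visible
 *						before the increment
 */
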
/*
 * atomic_clear(p, v)
 * { *p &= ~v; }
 */

#define	__atomic_clear_int(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" andc %0, %0, %3\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_int */

#ifdef __powerpc64__
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1: ldarx %0, 0, %2\n"					\
	" andc %0, %0, %3\n"					\
	" stdcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#else
#define	__atomic_clear_long(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" andc %0, %0, %3\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_clear_long */
#endif

#define	_ATOMIC_CLEAR(type)					\
    static __inline void					\
    atomic_clear_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_clear_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_clear_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_clear_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_clear_##type(p, v, t);				\
    }								\
    /* _ATOMIC_CLEAR */

_ATOMIC_CLEAR(int)
_ATOMIC_CLEAR(long)

#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int

#ifdef __powerpc64__
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long

#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#else
#define	atomic_clear_ptr	atomic_clear_int
#define	atomic_clear_acq_ptr	atomic_clear_acq_int
#define	atomic_clear_rel_ptr	atomic_clear_rel_int
#endif
#undef _ATOMIC_CLEAR
#undef __atomic_clear_long
#undef __atomic_clear_int

/*
 * atomic_cmpset(p, o, n)
 */
/* TODO -- see below */

/*
 * atomic_load_acq(p)
 */
/* TODO -- see below */

/*
 * atomic_readandclear(p)
 */
/* TODO -- see below */

/*
 * atomic_set(p, v)
 * { *p |= v; }
 */

#define	__atomic_set_int(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" or %0, %3, %0\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_int */

#ifdef __powerpc64__
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1: ldarx %0, 0, %2\n"					\
	" or %0, %3, %0\n"					\
	" stdcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#else
#define	__atomic_set_long(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" or %0, %3, %0\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_set_long */
#endif

#define	_ATOMIC_SET(type)					\
    static __inline void					\
    atomic_set_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
    }								\
								\
    static __inline void					\
    atomic_set_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__atomic_set_##type(p, v, t);				\
	__ATOMIC_ACQ();						\
    }								\
								\
    static __inline void					\
    atomic_set_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;						\
	__ATOMIC_REL();						\
	__atomic_set_##type(p, v, t);				\
    }								\
    /* _ATOMIC_SET */

_ATOMIC_SET(int)
_ATOMIC_SET(long)

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int

#ifdef __powerpc64__
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long

#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#else
#define	atomic_set_ptr		atomic_set_int
#define	atomic_set_acq_ptr	atomic_set_acq_int
#define	atomic_set_rel_ptr	atomic_set_rel_int
#endif
#undef _ATOMIC_SET
#undef __atomic_set_long
#undef __atomic_set_int

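/*
 * Usage sketch for atomic_set/atomic_clear above (illustrative only; the
 * flag word and bit value are hypothetical):
 *
 *	#define	SC_BUSY		0x0001
 *	static volatile u_int sc_flags;
 *
 *	atomic_set_acq_int(&sc_flags, SC_BUSY);		set the bit (acquire)
 *	...
 *	atomic_clear_rel_int(&sc_flags, SC_BUSY);	clear the bit (release)
 */
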
/*
 * atomic_subtract(p, v)
 * { *p -= v; }
 */

#define	__atomic_subtract_int(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" subf %0, %3, %0\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_int */

#ifdef __powerpc64__
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1: ldarx %0, 0, %2\n"					\
	" subf %0, %3, %0\n"					\
	" stdcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#else
#define	__atomic_subtract_long(p, v, t)				\
    __asm __volatile(						\
	"1: lwarx %0, 0, %2\n"					\
	" subf %0, %3, %0\n"					\
	" stwcx. %0, 0, %2\n"					\
	" bne- 1b\n"						\
	: "=&r" (t), "=m" (*p)					\
	: "r" (p), "r" (v), "m" (*p)				\
	: "cr0", "memory")					\
    /* __atomic_subtract_long */
#endif

#define	_ATOMIC_SUBTRACT(type)						\
    static __inline void						\
    atomic_subtract_##type(volatile u_##type *p, u_##type v) {		\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
									\
    static __inline void						\
    atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__atomic_subtract_##type(p, v, t);				\
	__ATOMIC_ACQ();							\
    }									\
									\
    static __inline void						\
    atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) {	\
	u_##type t;							\
	__ATOMIC_REL();							\
	__atomic_subtract_##type(p, v, t);				\
    }									\
    /* _ATOMIC_SUBTRACT */

_ATOMIC_SUBTRACT(int)
_ATOMIC_SUBTRACT(long)

#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int

#ifdef __powerpc64__
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long

#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#else
#define	atomic_subtract_ptr	atomic_subtract_int
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_int
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_int
#endif
#undef _ATOMIC_SUBTRACT
#undef __atomic_subtract_long
#undef __atomic_subtract_int

/*
 * atomic_store_rel(p, v)
 */
/* TODO -- see below */

/*
 * Old/original implementations that still need revisiting.
 */

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tlwarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stwcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");
#endif

	return (result);
}

#ifdef __powerpc64__
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result, temp;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"\tsync\n"			/* drain writes */
		"1:\tldarx %0, 0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"			/* load new value */
		"stdcx. %1, 0, %3\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "r" (addr), "m" (*addr)
		: "cr0", "memory");
#endif

	return (result);
}
#endif

#define	atomic_readandclear_32		atomic_readandclear_int

#ifdef __powerpc64__
#define	atomic_readandclear_64		atomic_readandclear_long

#define	atomic_readandclear_ptr		atomic_readandclear_long
#else
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{

	return ((u_long)atomic_readandclear_int((volatile u_int *)addr));
}

#define	atomic_readandclear_ptr		atomic_readandclear_int
#endif

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE v;						\
								\
	v = *p;							\
	mb();							\
	return (v);						\
}								\
								\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	powerpc_lwsync();					\
	*p = v;							\
}

ATOMIC_STORE_LOAD(int)

#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int

#ifdef __powerpc64__
ATOMIC_STORE_LOAD(long)

#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long

#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#else
static __inline u_long
atomic_load_acq_long(volatile u_long *addr)
{

	return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
}

static __inline void
atomic_store_rel_long(volatile u_long *addr, u_long val)
{

	atomic_store_rel_int((volatile u_int *)addr, (u_int)val);
}

#define	atomic_load_acq_ptr	atomic_load_acq_int
#define	atomic_store_rel_ptr	atomic_store_rel_int
#endif
#undef ATOMIC_STORE_LOAD

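/*
 * Usage sketch for the acquire load / release store pair above
 * (illustrative only; 'data', 'ready', compute() and consume() are
 * hypothetical):
 *
 *	producer:
 *		data = compute();			plain store
 *		atomic_store_rel_int(&ready, 1);	prior stores visible first
 *
 *	consumer:
 *		if (atomic_load_acq_int(&ready))	later loads cannot pass this
 *			consume(data);			sees the producer's 'data'
 */
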
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
{
	int ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
{
	int ret;

#ifdef __GNUCLIKE_ASM
	__asm __volatile (
#ifdef __powerpc64__
		"1:\tldarx %0, 0, %2\n\t"	/* load old value */
		"cmpld %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stdcx. %4, 0, %2\n\t"		/* attempt to store */
#else
		"1:\tlwarx %0, 0, %2\n\t"	/* load old value */
		"cmplw %3, %0\n\t"		/* compare */
		"bne 2f\n\t"			/* exit if not equal */
		"stwcx. %4, 0, %2\n\t"		/* attempt to store */
#endif
		"bne- 1b\n\t"			/* spin if failed */
		"li %0, 1\n\t"			/* success - retval = 1 */
		"b 3f\n\t"			/* we've succeeded */
		"2:\n\t"
#ifdef __powerpc64__
		"stdcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
#else
		"stwcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
#endif
		"li %0, 0\n\t"			/* failure - retval = 0 */
		"3:\n\t"
		: "=&r" (ret), "=m" (*p)
		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
		: "cr0", "memory");
#endif

	return (ret);
}

static __inline int
atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	int retval;

	retval = atomic_cmpset_int(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_int(p, cmpval, newval));
}

static __inline int
atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	int retval;

	retval = atomic_cmpset_long(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}

static __inline int
atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
{
	__ATOMIC_REL();
	return (atomic_cmpset_long(p, cmpval, newval));
}

#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int

#ifdef __powerpc64__
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long

#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#else
#define	atomic_cmpset_ptr	atomic_cmpset_int
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
#endif

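/*
 * Usage sketch for atomic_cmpset (illustrative only; a minimal
 * test-and-set spin lock with a hypothetical 'lock' word):
 *
 *	static volatile u_int lock;	0 = free, 1 = held
 *
 *	while (!atomic_cmpset_acq_int(&lock, 0, 1))
 *		;			spin until we transition 0 -> 1
 *	... critical section; cannot be reordered before the acquire ...
 *	atomic_store_rel_int(&lock, 0);	release: the section's stores are
 *					visible before the lock appears free
 */
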
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	u_int value;

	do {
		value = *p;
	} while (!atomic_cmpset_int(p, value, value + v));
	return (value);
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long value;

	do {
		value = *p;
	} while (!atomic_cmpset_long(p, value, value + v));
	return (value);
}

static __inline u_int
atomic_swap_32(volatile u_int *p, u_int v)
{
	u_int prev;

	__asm __volatile(
	    "1: lwarx %0,0,%2\n"
	    " stwcx. %3,0,%2\n"
	    " bne- 1b\n"
	    : "=&r" (prev), "+m" (*(volatile u_int *)p)
	    : "r" (p), "r" (v)
	    : "cr0", "memory");

	return (prev);
}

#ifdef __powerpc64__
static __inline u_long
atomic_swap_64(volatile u_long *p, u_long v)
{
	u_long prev;

	__asm __volatile(
	    "1: ldarx %0,0,%2\n"
	    " stdcx. %3,0,%2\n"
	    " bne- 1b\n"
	    : "=&r" (prev), "+m" (*(volatile u_long *)p)
	    : "r" (p), "r" (v)
	    : "cr0", "memory");

	return (prev);
}
#endif

#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_swap_int		atomic_swap_32

#ifdef __powerpc64__
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
#endif

#undef __ATOMIC_REL
#undef __ATOMIC_ACQ

static __inline void
atomic_thread_fence_acq(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	powerpc_lwsync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__asm __volatile("sync" : : : "memory");
}

#endif /* ! _MACHINE_ATOMIC_H_ */