1 /* $NetBSD: atomic.h,v 1.1 2002/10/19 12:22:34 bsh Exp $ */ 2 3 /*- 4 * Copyright (C) 2003-2004 Olivier Houchard 5 * Copyright (C) 1994-1997 Mark Brinicombe 6 * Copyright (C) 1994 Brini 7 * All rights reserved. 8 * 9 * This code is derived from software written for Brini by Mark Brinicombe 10 * 11 * Redistribution and use in source and binary forms, with or without 12 * modification, are permitted provided that the following conditions 13 * are met: 14 * 1. Redistributions of source code must retain the above copyright 15 * notice, this list of conditions and the following disclaimer. 16 * 2. Redistributions in binary form must reproduce the above copyright 17 * notice, this list of conditions and the following disclaimer in the 18 * documentation and/or other materials provided with the distribution. 19 * 3. All advertising materials mentioning features or use of this software 20 * must display the following acknowledgement: 21 * This product includes software developed by Brini. 22 * 4. The name of Brini may not be used to endorse or promote products 23 * derived from this software without specific prior written permission. 24 * 25 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR 26 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 27 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 28 * IN NO EVENT SHALL BRINI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 29 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 30 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 31 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 32 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 33 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 34 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 35 * 36 * $FreeBSD$ 37 */ 38 39 #ifndef _MACHINE_ATOMIC_H_ 40 #define _MACHINE_ATOMIC_H_ 41 42 #include <sys/types.h> 43 44 #ifndef _KERNEL 45 #include <machine/sysarch.h> 46 #else 47 #include <machine/cpuconf.h> 48 #endif 49 50 #if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__) 51 #define isb() __asm __volatile("isb" : : : "memory") 52 #define dsb() __asm __volatile("dsb" : : : "memory") 53 #define dmb() __asm __volatile("dmb" : : : "memory") 54 #elif defined (__ARM_ARCH_6__) || defined (__ARM_ARCH_6J__) || \ 55 defined (__ARM_ARCH_6K__) || defined (__ARM_ARCH_6T2__) || \ 56 defined (__ARM_ARCH_6Z__) || defined (__ARM_ARCH_6ZK__) 57 #define isb() __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory") 58 #define dsb() __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory") 59 #define dmb() __asm __volatile("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory") 60 #else 61 #define isb() 62 #define dsb() 63 #define dmb() 64 #endif 65 66 #define mb() dmb() 67 #define wmb() dmb() 68 #define rmb() dmb() 69 70 #ifndef I32_bit 71 #define I32_bit (1 << 7) /* IRQ disable */ 72 #endif 73 #ifndef F32_bit 74 #define F32_bit (1 << 6) /* FIQ disable */ 75 #endif 76 77 /* 78 * It would be nice to use _HAVE_ARMv6_INSTRUCTIONS from machine/asm.h 79 * here, but that header can't be included here because this is C 80 * code. I would like to move the _HAVE_ARMv6_INSTRUCTIONS definition 81 * out of asm.h so it can be used in both asm and C code. - kientzle@ 82 */ 83 #if defined (__ARM_ARCH_7__) || \ 84 defined (__ARM_ARCH_7A__) || \ 85 defined (__ARM_ARCH_6__) || \ 86 defined (__ARM_ARCH_6J__) || \ 87 defined (__ARM_ARCH_6K__) || \ 88 defined (__ARM_ARCH_6T2__) || \ 89 defined (__ARM_ARCH_6Z__) || \ 90 defined (__ARM_ARCH_6ZK__) 91 static __inline void 92 __do_dmb(void) 93 { 94 95 #if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__) 96 __asm __volatile("dmb" : : : "memory"); 97 #else 98 __asm __volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory"); 99 #endif 100 } 101 102 #define ATOMIC_ACQ_REL_LONG(NAME) \ 103 static __inline void \ 104 atomic_##NAME##_acq_long(__volatile u_long *p, u_long v) \ 105 { \ 106 atomic_##NAME##_long(p, v); \ 107 __do_dmb(); \ 108 } \ 109 \ 110 static __inline void \ 111 atomic_##NAME##_rel_long(__volatile u_long *p, u_long v) \ 112 { \ 113 __do_dmb(); \ 114 atomic_##NAME##_long(p, v); \ 115 } 116 117 #define ATOMIC_ACQ_REL(NAME, WIDTH) \ 118 static __inline void \ 119 atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\ 120 { \ 121 atomic_##NAME##_##WIDTH(p, v); \ 122 __do_dmb(); \ 123 } \ 124 \ 125 static __inline void \ 126 atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\ 127 { \ 128 __do_dmb(); \ 129 atomic_##NAME##_##WIDTH(p, v); \ 130 } 131 132 static __inline void 133 atomic_set_32(volatile uint32_t *address, uint32_t setmask) 134 { 135 uint32_t tmp = 0, tmp2 = 0; 136 137 __asm __volatile("1: ldrex %0, [%2]\n" 138 "orr %0, %0, %3\n" 139 "strex %1, %0, [%2]\n" 140 "cmp %1, #0\n" 141 "it ne\n" 142 "bne 1b\n" 143 : "=&r" (tmp), "+r" (tmp2) 144 , "+r" (address), "+r" (setmask) : : "cc", "memory"); 145 146 } 147 148 static __inline void 149 atomic_set_long(volatile u_long *address, u_long setmask) 150 { 151 u_long tmp = 0, tmp2 = 0; 152 153 __asm __volatile("1: ldrex %0, [%2]\n" 154 "orr %0, %0, %3\n" 155 "strex %1, %0, [%2]\n" 156 "cmp %1, #0\n" 157 "it ne\n" 158 "bne 1b\n" 159 : "=&r" (tmp), "+r" (tmp2) 160 , "+r" (address), "+r" (setmask) : : "cc", "memory"); 161 162 } 163 164 static __inline void 165 atomic_clear_32(volatile uint32_t *address, uint32_t setmask) 166 { 167 uint32_t tmp = 0, tmp2 = 0; 168 169 __asm __volatile("1: ldrex %0, [%2]\n" 170 "bic %0, %0, %3\n" 171 "strex %1, %0, [%2]\n" 172 "cmp %1, #0\n" 173 "it ne\n" 174 "bne 1b\n" 175 : "=&r" (tmp), "+r" (tmp2) 176 ,"+r" (address), "+r" (setmask) : : "cc", "memory"); 177 } 178 179 static __inline void 180 atomic_clear_long(volatile u_long *address, u_long setmask) 181 { 182 u_long tmp = 0, tmp2 = 0; 183 184 __asm __volatile("1: ldrex %0, [%2]\n" 185 "bic %0, %0, %3\n" 186 "strex %1, %0, [%2]\n" 187 "cmp %1, #0\n" 188 "it ne\n" 189 "bne 1b\n" 190 : "=&r" (tmp), "+r" (tmp2) 191 ,"+r" (address), "+r" (setmask) : : "cc", "memory"); 192 } 193 194 static __inline u_int32_t 195 atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval) 196 { 197 uint32_t ret; 198 199 __asm __volatile("1: ldrex %0, [%1]\n" 200 "cmp %0, %2\n" 201 "it ne\n" 202 "movne %0, #0\n" 203 "bne 2f\n" 204 "strex %0, %3, [%1]\n" 205 "cmp %0, #0\n" 206 "ite eq\n" 207 "moveq %0, #1\n" 208 "bne 1b\n" 209 "2:" 210 : "=&r" (ret) 211 ,"+r" (p), "+r" (cmpval), "+r" (newval) : : "cc", 212 "memory"); 213 return (ret); 214 } 215 216 static __inline u_long 217 atomic_cmpset_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval) 218 { 219 u_long ret; 220 221 __asm __volatile("1: ldrex %0, [%1]\n" 222 "cmp %0, %2\n" 223 "itt ne\n" 224 "movne %0, #0\n" 225 "bne 2f\n" 226 "strex %0, %3, [%1]\n" 227 "cmp %0, #0\n" 228 "ite eq\n" 229 "moveq %0, #1\n" 230 "bne 1b\n" 231 "2:" 232 : "=&r" (ret) 233 ,"+r" (p), "+r" (cmpval), "+r" (newval) : : "cc", 234 "memory"); 235 return (ret); 236 } 237 238 static __inline u_int32_t 239 atomic_cmpset_acq_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval) 240 { 241 u_int32_t ret = atomic_cmpset_32(p, cmpval, newval); 242 243 __do_dmb(); 244 return (ret); 245 } 246 247 static __inline u_long 248 atomic_cmpset_acq_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval) 249 { 250 u_long ret = atomic_cmpset_long(p, cmpval, newval); 251 252 __do_dmb(); 253 return (ret); 254 } 255 256 static __inline u_int32_t 257 atomic_cmpset_rel_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval) 258 { 259 260 __do_dmb(); 261 return (atomic_cmpset_32(p, cmpval, newval)); 262 } 263 264 static __inline u_long 265 atomic_cmpset_rel_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval) 266 { 267 268 __do_dmb(); 269 return (atomic_cmpset_long(p, cmpval, newval)); 270 } 271 272 273 static __inline void 274 atomic_add_32(volatile u_int32_t *p, u_int32_t val) 275 { 276 uint32_t tmp = 0, tmp2 = 0; 277 278 __asm __volatile("1: ldrex %0, [%2]\n" 279 "add %0, %0, %3\n" 280 "strex %1, %0, [%2]\n" 281 "cmp %1, #0\n" 282 "it ne\n" 283 "bne 1b\n" 284 : "=&r" (tmp), "+r" (tmp2) 285 ,"+r" (p), "+r" (val) : : "cc", "memory"); 286 } 287 288 static __inline void 289 atomic_add_long(volatile u_long *p, u_long val) 290 { 291 u_long tmp = 0, tmp2 = 0; 292 293 __asm __volatile("1: ldrex %0, [%2]\n" 294 "add %0, %0, %3\n" 295 "strex %1, %0, [%2]\n" 296 "cmp %1, #0\n" 297 "it ne\n" 298 "bne 1b\n" 299 : "=&r" (tmp), "+r" (tmp2) 300 ,"+r" (p), "+r" (val) : : "cc", "memory"); 301 } 302 303 static __inline void 304 atomic_subtract_32(volatile u_int32_t *p, u_int32_t val) 305 { 306 uint32_t tmp = 0, tmp2 = 0; 307 308 __asm __volatile("1: ldrex %0, [%2]\n" 309 "sub %0, %0, %3\n" 310 "strex %1, %0, [%2]\n" 311 "cmp %1, #0\n" 312 "it ne\n" 313 "bne 1b\n" 314 : "=&r" (tmp), "+r" (tmp2) 315 ,"+r" (p), "+r" (val) : : "cc", "memory"); 316 } 317 318 static __inline void 319 atomic_subtract_long(volatile u_long *p, u_long val) 320 { 321 u_long tmp = 0, tmp2 = 0; 322 323 __asm __volatile("1: ldrex %0, [%2]\n" 324 "sub %0, %0, %3\n" 325 "strex %1, %0, [%2]\n" 326 "cmp %1, #0\n" 327 "it ne\n" 328 "bne 1b\n" 329 : "=&r" (tmp), "+r" (tmp2) 330 ,"+r" (p), "+r" (val) : : "cc", "memory"); 331 } 332 333 ATOMIC_ACQ_REL(clear, 32) 334 ATOMIC_ACQ_REL(add, 32) 335 ATOMIC_ACQ_REL(subtract, 32) 336 ATOMIC_ACQ_REL(set, 32) 337 ATOMIC_ACQ_REL_LONG(clear) 338 ATOMIC_ACQ_REL_LONG(add) 339 ATOMIC_ACQ_REL_LONG(subtract) 340 ATOMIC_ACQ_REL_LONG(set) 341 342 #undef ATOMIC_ACQ_REL 343 #undef ATOMIC_ACQ_REL_LONG 344 345 static __inline uint32_t 346 atomic_fetchadd_32(volatile uint32_t *p, uint32_t val) 347 { 348 uint32_t tmp = 0, tmp2 = 0, ret = 0; 349 350 __asm __volatile("1: ldrex %0, [%3]\n" 351 "add %1, %0, %4\n" 352 "strex %2, %1, [%3]\n" 353 "cmp %2, #0\n" 354 "it ne\n" 355 "bne 1b\n" 356 : "+r" (ret), "=&r" (tmp), "+r" (tmp2) 357 ,"+r" (p), "+r" (val) : : "cc", "memory"); 358 return (ret); 359 } 360 361 static __inline uint32_t 362 atomic_readandclear_32(volatile u_int32_t *p) 363 { 364 uint32_t ret, tmp = 0, tmp2 = 0; 365 366 __asm __volatile("1: ldrex %0, [%3]\n" 367 "mov %1, #0\n" 368 "strex %2, %1, [%3]\n" 369 "cmp %2, #0\n" 370 "it ne\n" 371 "bne 1b\n" 372 : "=r" (ret), "=&r" (tmp), "+r" (tmp2) 373 ,"+r" (p) : : "cc", "memory"); 374 return (ret); 375 } 376 377 static __inline uint32_t 378 atomic_load_acq_32(volatile uint32_t *p) 379 { 380 uint32_t v; 381 382 v = *p; 383 __do_dmb(); 384 return (v); 385 } 386 387 static __inline void 388 atomic_store_rel_32(volatile uint32_t *p, uint32_t v) 389 { 390 391 __do_dmb(); 392 *p = v; 393 } 394 395 static __inline u_long 396 atomic_fetchadd_long(volatile u_long *p, u_long val) 397 { 398 u_long tmp = 0, tmp2 = 0, ret = 0; 399 400 __asm __volatile("1: ldrex %0, [%3]\n" 401 "add %1, %0, %4\n" 402 "strex %2, %1, [%3]\n" 403 "cmp %2, #0\n" 404 "it ne\n" 405 "bne 1b\n" 406 : "+r" (ret), "=&r" (tmp), "+r" (tmp2) 407 ,"+r" (p), "+r" (val) : : "cc", "memory"); 408 return (ret); 409 } 410 411 static __inline u_long 412 atomic_readandclear_long(volatile u_long *p) 413 { 414 u_long ret, tmp = 0, tmp2 = 0; 415 416 __asm __volatile("1: ldrex %0, [%3]\n" 417 "mov %1, #0\n" 418 "strex %2, %1, [%3]\n" 419 "cmp %2, #0\n" 420 "it ne\n" 421 "bne 1b\n" 422 : "=r" (ret), "=&r" (tmp), "+r" (tmp2) 423 ,"+r" (p) : : "cc", "memory"); 424 return (ret); 425 } 426 427 static __inline u_long 428 atomic_load_acq_long(volatile u_long *p) 429 { 430 u_long v; 431 432 v = *p; 433 __do_dmb(); 434 return (v); 435 } 436 437 static __inline void 438 atomic_store_rel_long(volatile u_long *p, u_long v) 439 { 440 441 __do_dmb(); 442 *p = v; 443 } 444 #else /* < armv6 */ 445 446 #define __with_interrupts_disabled(expr) \ 447 do { \ 448 u_int cpsr_save, tmp; \ 449 \ 450 __asm __volatile( \ 451 "mrs %0, cpsr;" \ 452 "orr %1, %0, %2;" \ 453 "msr cpsr_all, %1;" \ 454 : "=r" (cpsr_save), "=r" (tmp) \ 455 : "I" (I32_bit | F32_bit) \ 456 : "cc" ); \ 457 (expr); \ 458 __asm __volatile( \ 459 "msr cpsr_all, %0" \ 460 : /* no output */ \ 461 : "r" (cpsr_save) \ 462 : "cc" ); \ 463 } while(0) 464 465 static __inline uint32_t 466 __swp(uint32_t val, volatile uint32_t *ptr) 467 { 468 __asm __volatile("swp %0, %2, [%3]" 469 : "=&r" (val), "=m" (*ptr) 470 : "r" (val), "r" (ptr), "m" (*ptr) 471 : "memory"); 472 return (val); 473 } 474 475 476 #ifdef _KERNEL 477 static __inline void 478 atomic_set_32(volatile uint32_t *address, uint32_t setmask) 479 { 480 __with_interrupts_disabled(*address |= setmask); 481 } 482 483 static __inline void 484 atomic_clear_32(volatile uint32_t *address, uint32_t clearmask) 485 { 486 __with_interrupts_disabled(*address &= ~clearmask); 487 } 488 489 static __inline u_int32_t 490 atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval) 491 { 492 int ret; 493 494 __with_interrupts_disabled( 495 { 496 if (*p == cmpval) { 497 *p = newval; 498 ret = 1; 499 } else { 500 ret = 0; 501 } 502 }); 503 return (ret); 504 } 505 506 static __inline void 507 atomic_add_32(volatile u_int32_t *p, u_int32_t val) 508 { 509 __with_interrupts_disabled(*p += val); 510 } 511 512 static __inline void 513 atomic_subtract_32(volatile u_int32_t *p, u_int32_t val) 514 { 515 __with_interrupts_disabled(*p -= val); 516 } 517 518 static __inline uint32_t 519 atomic_fetchadd_32(volatile uint32_t *p, uint32_t v) 520 { 521 uint32_t value; 522 523 __with_interrupts_disabled( 524 { 525 value = *p; 526 *p += v; 527 }); 528 return (value); 529 } 530 531 #else /* !_KERNEL */ 532 533 static __inline u_int32_t 534 atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval) 535 { 536 register int done, ras_start = ARM_RAS_START; 537 538 __asm __volatile("1:\n" 539 "adr %1, 1b\n" 540 "str %1, [%0]\n" 541 "adr %1, 2f\n" 542 "str %1, [%0, #4]\n" 543 "ldr %1, [%2]\n" 544 "cmp %1, %3\n" 545 "streq %4, [%2]\n" 546 "2:\n" 547 "mov %1, #0\n" 548 "str %1, [%0]\n" 549 "mov %1, #0xffffffff\n" 550 "str %1, [%0, #4]\n" 551 "moveq %1, #1\n" 552 "movne %1, #0\n" 553 : "+r" (ras_start), "=r" (done) 554 ,"+r" (p), "+r" (cmpval), "+r" (newval) : : "cc", "memory"); 555 return (done); 556 } 557 558 static __inline void 559 atomic_add_32(volatile u_int32_t *p, u_int32_t val) 560 { 561 int start, ras_start = ARM_RAS_START; 562 563 __asm __volatile("1:\n" 564 "adr %1, 1b\n" 565 "str %1, [%0]\n" 566 "adr %1, 2f\n" 567 "str %1, [%0, #4]\n" 568 "ldr %1, [%2]\n" 569 "add %1, %1, %3\n" 570 "str %1, [%2]\n" 571 "2:\n" 572 "mov %1, #0\n" 573 "str %1, [%0]\n" 574 "mov %1, #0xffffffff\n" 575 "str %1, [%0, #4]\n" 576 : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val) 577 : : "memory"); 578 } 579 580 static __inline void 581 atomic_subtract_32(volatile u_int32_t *p, u_int32_t val) 582 { 583 int start, ras_start = ARM_RAS_START; 584 585 __asm __volatile("1:\n" 586 "adr %1, 1b\n" 587 "str %1, [%0]\n" 588 "adr %1, 2f\n" 589 "str %1, [%0, #4]\n" 590 "ldr %1, [%2]\n" 591 "sub %1, %1, %3\n" 592 "str %1, [%2]\n" 593 "2:\n" 594 "mov %1, #0\n" 595 "str %1, [%0]\n" 596 "mov %1, #0xffffffff\n" 597 "str %1, [%0, #4]\n" 598 599 : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val) 600 : : "memory"); 601 } 602 603 static __inline void 604 atomic_set_32(volatile uint32_t *address, uint32_t setmask) 605 { 606 int start, ras_start = ARM_RAS_START; 607 608 __asm __volatile("1:\n" 609 "adr %1, 1b\n" 610 "str %1, [%0]\n" 611 "adr %1, 2f\n" 612 "str %1, [%0, #4]\n" 613 "ldr %1, [%2]\n" 614 "orr %1, %1, %3\n" 615 "str %1, [%2]\n" 616 "2:\n" 617 "mov %1, #0\n" 618 "str %1, [%0]\n" 619 "mov %1, #0xffffffff\n" 620 "str %1, [%0, #4]\n" 621 622 : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (setmask) 623 : : "memory"); 624 } 625 626 static __inline void 627 atomic_clear_32(volatile uint32_t *address, uint32_t clearmask) 628 { 629 int start, ras_start = ARM_RAS_START; 630 631 __asm __volatile("1:\n" 632 "adr %1, 1b\n" 633 "str %1, [%0]\n" 634 "adr %1, 2f\n" 635 "str %1, [%0, #4]\n" 636 "ldr %1, [%2]\n" 637 "bic %1, %1, %3\n" 638 "str %1, [%2]\n" 639 "2:\n" 640 "mov %1, #0\n" 641 "str %1, [%0]\n" 642 "mov %1, #0xffffffff\n" 643 "str %1, [%0, #4]\n" 644 : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (clearmask) 645 : : "memory"); 646 647 } 648 649 static __inline uint32_t 650 atomic_fetchadd_32(volatile uint32_t *p, uint32_t v) 651 { 652 uint32_t start, tmp, ras_start = ARM_RAS_START; 653 654 __asm __volatile("1:\n" 655 "adr %1, 1b\n" 656 "str %1, [%0]\n" 657 "adr %1, 2f\n" 658 "str %1, [%0, #4]\n" 659 "ldr %1, [%3]\n" 660 "mov %2, %1\n" 661 "add %2, %2, %4\n" 662 "str %2, [%3]\n" 663 "2:\n" 664 "mov %2, #0\n" 665 "str %2, [%0]\n" 666 "mov %2, #0xffffffff\n" 667 "str %2, [%0, #4]\n" 668 : "+r" (ras_start), "=r" (start), "=r" (tmp), "+r" (p), "+r" (v) 669 : : "memory"); 670 return (start); 671 } 672 673 #endif /* _KERNEL */ 674 675 676 static __inline uint32_t 677 atomic_readandclear_32(volatile u_int32_t *p) 678 { 679 680 return (__swp(0, p)); 681 } 682 683 #define atomic_cmpset_rel_32 atomic_cmpset_32 684 #define atomic_cmpset_acq_32 atomic_cmpset_32 685 #define atomic_set_rel_32 atomic_set_32 686 #define atomic_set_acq_32 atomic_set_32 687 #define atomic_clear_rel_32 atomic_clear_32 688 #define atomic_clear_acq_32 atomic_clear_32 689 #define atomic_add_rel_32 atomic_add_32 690 #define atomic_add_acq_32 atomic_add_32 691 #define atomic_subtract_rel_32 atomic_subtract_32 692 #define atomic_subtract_acq_32 atomic_subtract_32 693 #define atomic_store_rel_32 atomic_store_32 694 #define atomic_store_rel_long atomic_store_long 695 #define atomic_load_acq_32 atomic_load_32 696 #define atomic_load_acq_long atomic_load_long 697 #define atomic_add_acq_long atomic_add_long 698 #define atomic_add_rel_long atomic_add_long 699 #define atomic_subtract_acq_long atomic_subtract_long 700 #define atomic_subtract_rel_long atomic_subtract_long 701 #define atomic_clear_acq_long atomic_clear_long 702 #define atomic_clear_rel_long atomic_clear_long 703 #define atomic_set_acq_long atomic_set_long 704 #define atomic_set_rel_long atomic_set_long 705 #define atomic_cmpset_acq_long atomic_cmpset_long 706 #define atomic_cmpset_rel_long atomic_cmpset_long 707 #define atomic_load_acq_long atomic_load_long 708 #undef __with_interrupts_disabled 709 710 static __inline void 711 atomic_add_long(volatile u_long *p, u_long v) 712 { 713 714 atomic_add_32((volatile uint32_t *)p, v); 715 } 716 717 static __inline void 718 atomic_clear_long(volatile u_long *p, u_long v) 719 { 720 721 atomic_clear_32((volatile uint32_t *)p, v); 722 } 723 724 static __inline int 725 atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe) 726 { 727 728 return (atomic_cmpset_32((volatile uint32_t *)dst, old, newe)); 729 } 730 731 static __inline u_long 732 atomic_fetchadd_long(volatile u_long *p, u_long v) 733 { 734 735 return (atomic_fetchadd_32((volatile uint32_t *)p, v)); 736 } 737 738 static __inline void 739 atomic_readandclear_long(volatile u_long *p) 740 { 741 742 atomic_readandclear_32((volatile uint32_t *)p); 743 } 744 745 static __inline void 746 atomic_set_long(volatile u_long *p, u_long v) 747 { 748 749 atomic_set_32((volatile uint32_t *)p, v); 750 } 751 752 static __inline void 753 atomic_subtract_long(volatile u_long *p, u_long v) 754 { 755 756 atomic_subtract_32((volatile uint32_t *)p, v); 757 } 758 759 760 761 #endif /* Arch >= v6 */ 762 763 static __inline int 764 atomic_load_32(volatile uint32_t *v) 765 { 766 767 return (*v); 768 } 769 770 static __inline void 771 atomic_store_32(volatile uint32_t *dst, uint32_t src) 772 { 773 *dst = src; 774 } 775 776 static __inline int 777 atomic_load_long(volatile u_long *v) 778 { 779 780 return (*v); 781 } 782 783 static __inline void 784 atomic_store_long(volatile u_long *dst, u_long src) 785 { 786 *dst = src; 787 } 788 789 #define atomic_clear_ptr atomic_clear_32 790 #define atomic_set_ptr atomic_set_32 791 #define atomic_cmpset_ptr atomic_cmpset_32 792 #define atomic_cmpset_rel_ptr atomic_cmpset_rel_32 793 #define atomic_cmpset_acq_ptr atomic_cmpset_acq_32 794 #define atomic_store_ptr atomic_store_32 795 #define atomic_store_rel_ptr atomic_store_rel_32 796 797 #define atomic_add_int atomic_add_32 798 #define atomic_add_acq_int atomic_add_acq_32 799 #define atomic_add_rel_int atomic_add_rel_32 800 #define atomic_subtract_int atomic_subtract_32 801 #define atomic_subtract_acq_int atomic_subtract_acq_32 802 #define atomic_subtract_rel_int atomic_subtract_rel_32 803 #define atomic_clear_int atomic_clear_32 804 #define atomic_clear_acq_int atomic_clear_acq_32 805 #define atomic_clear_rel_int atomic_clear_rel_32 806 #define atomic_set_int atomic_set_32 807 #define atomic_set_acq_int atomic_set_acq_32 808 #define atomic_set_rel_int atomic_set_rel_32 809 #define atomic_cmpset_int atomic_cmpset_32 810 #define atomic_cmpset_acq_int atomic_cmpset_acq_32 811 #define atomic_cmpset_rel_int atomic_cmpset_rel_32 812 #define atomic_fetchadd_int atomic_fetchadd_32 813 #define atomic_readandclear_int atomic_readandclear_32 814 #define atomic_load_acq_int atomic_load_acq_32 815 #define atomic_store_rel_int atomic_store_rel_32 816 817 #endif /* _MACHINE_ATOMIC_H_ */ 818