
/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/* -------------------------- Compiler variations ------------------------ */

/* ------------------------- Compiler recognition ---------------------- */

/* Check for quad-precision extension. */

/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */

// macros to cast out qualifiers and to re-interpret types
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------

/* Check if the OS/arch can support user-level mwait */
#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
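
// Illustrative sketch of the masking arithmetic above (assumes a 4 KiB page
// size; the example addresses are made up):
//   masking with ~(page_size - 1) clears the low 12 bits, so
//     ALIGN_TO_PAGE((void *)0x12345) == (void *)0x12000
//     PAGE_ALIGNED((void *)0x12000)  /* true: low bits are zero   */
//     PAGE_ALIGNED((void *)0x12345)  /* false: low bits are 0x345 */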
/* ---------- Support for cache alignment, padding, etc. ----------------*/

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
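
// Usage sketch (illustrative only; `gtid_counter` is a hypothetical variable):
// the KMP_TEST_THEN_* macros are fetch-and-op operations, so they return the
// value the location held *before* the update.
//   kmp_int32 gtid_counter = 0;
//   kmp_int32 old = KMP_TEST_THEN_INC32(&gtid_counter);
//   // old == 0, gtid_counter == 1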
#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
// #define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
// #define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
// #define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
// #define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store_rel16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}
inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}
inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}
inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}
inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}
inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}
inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}
inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}
inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}
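
// Sketch of how a boolean compare-and-store like the wrappers above is
// typically used (illustrative only; `lock_word` and the 0/1 encoding are
// hypothetical, not names from this header):
//   volatile kmp_int32 lock_word = 0; // 0 = free, 1 = held
//   while (!KMP_COMPARE_AND_STORE_ACQ32(&lock_word, 0, 1)) {
//     /* spin until the swap from 0 to 1 succeeds */
//   }
//   /* ... critical section ... */
//   KMP_COMPARE_AND_STORE_REL32(&lock_word, 1, 0); // release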
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  _InterlockedCompareExchange8((p), (sv), (cv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  _InterlockedCompareExchange16((p), (sv), (cv))

#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv), \
                                (kmp_int64)(cv))
#define KMP_XCHG_FIXED8(p, v) \
  _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) _InterlockedExchange16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      _InterlockedExchange64((volatile kmp_int64 *)p, *(kmp_int64 *)&v);
  return *(kmp_real64 *)&tmp;
}
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);
extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

/* cast p to the correct type so that the proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
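
// Why the casts matter (illustrative note, not part of the original header):
// the __sync/__atomic builtins infer the operand width from the pointer type,
// so casting p selects the 4-byte or 8-byte atomic form, e.g.
//   __sync_fetch_and_add((volatile kmp_int32 *)(p), 1);   // 32-bit locked add
//   __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL); // 64-bit locked add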
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v) \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v) \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v) \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif
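
// Illustrative sketch (hypothetical `flags` variable, not runtime state):
// the OR/AND forms atomically set or clear flag bits while returning the
// previous value, which tells the caller who got there first.
//   kmp_uint32 flags = 0;
//   kmp_uint32 prev = KMP_TEST_THEN_OR32(&flags, 0x4);  // set bit 2
//   if (!(prev & 0x4)) { /* this thread set it first */ }
//   KMP_TEST_THEN_AND32(&flags, ~(kmp_uint32)0x4);      // clear bit 2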
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
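
// Note the two families above: KMP_COMPARE_AND_STORE_* returns a success
// flag, while KMP_COMPARE_AND_STORE_RET* returns the value observed at p.
// A hedged sketch of a retry loop built on the RET form (`counter_p` is a
// hypothetical volatile kmp_uint32 pointer, not something this header names):
//   kmp_uint32 old_v, new_v;
//   do {
//     old_v = *counter_p;      // read the current value
//     new_v = old_v * 2 + 1;   // compute the replacement
//   } while (KMP_COMPARE_AND_STORE_RET32(counter_p, old_v, new_v) != old_v);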
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                  kmp_uint64 cv,
                                                  kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
#endif
#if KMP_ARCH_MIPS
#define KMP_XCHG_FIXED8(p, v) \
  __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v), \
                      __ATOMIC_SEQ_CST)
#else
#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#endif
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  volatile kmp_uint32 *up;
  kmp_uint32 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_uint32 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real32 ftmp;
  memcpy(&ftmp, &tmp, sizeof(ftmp));
  return ftmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  volatile kmp_uint64 *up;
  kmp_uint64 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_uint64 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real64 ftmp;
  memcpy(&ftmp, &tmp, sizeof(ftmp));
  return ftmp;
}

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
/* ------------- relaxed consistency memory model stuff ------------------ */

// fence-style instructions do not exist, but lock; xaddl $0,(%rsp) can be used.
// * If users hand-code NGO stores they should insert the fence
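
// A minimal sketch of the "lock; xaddl $0,(%rsp)" full-fence idiom mentioned
// above (illustrative inline assembly, not a macro this header defines):
//   static inline void full_fence_via_locked_xadd(void) {
//     int tmp = 0;
//     __asm__ __volatile__("lock; xaddl %0, (%%rsp)"
//                          : "+r"(tmp) : : "memory", "cc");
//   }
// Any locked read-modify-write on x86 orders earlier loads and stores with
// later ones, so it can stand in where an mfence-style instruction is
// unavailable.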
/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
#define TCD_4(a) (--(a))
#define TCD_8(a) (--(a))

kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */

#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
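
// Usage sketch for the std::atomic wrappers above (illustrative; `nrefs` is a
// hypothetical field, not something this header declares):
//   std::atomic<kmp_int32> nrefs;
//   KMP_ATOMIC_ST_RLX(&nrefs, 1);             // relaxed initial store
//   kmp_int32 prev = KMP_ATOMIC_INC(&nrefs);  // fetch_add(1, acq_rel)
//   if (KMP_ATOMIC_OP(fetch_sub, &nrefs, 1, acq_rel) == 1) {
//     /* last reference released */
//   }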
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
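
// Hedged usage sketch (the `state`/`expected`/`desired` names are made up for
// illustration): perform a one-shot 0 -> 1 transition with release semantics.
//   std::atomic<kmp_int32> state;
//   kmp_int32 expected = 0, desired = 1;
//   if (__kmp_atomic_compare_store_rel(&state, expected, desired)) {
//     /* this thread performed the transition */
//   }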
// MSVC doesn't have this, but clang/clang-cl does.