Lines Matching full:volatile

479 #define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
481 InterlockedExchangeAdd((volatile long *)(p), 1)
482 #define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
484 InterlockedExchangeAdd((volatile long *)(p), 4)
485 #define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
487 InterlockedExchangeAdd((volatile long *)(p), -1)
489 InterlockedExchangeAdd((volatile long *)(p), (v))
492 InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))
495 InterlockedExchange((volatile long *)(p), (long)(v))
497 InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))
499 inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
500 kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
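
These Windows mappings make KMP_TEST_THEN_* fetch-and-op primitives: InterlockedExchangeAdd returns the value the target held before the addition, and KMP_XCHG_REAL32 exchanges the float's bit pattern through a long. A minimal Windows-only sketch of the fetch-and-add behavior (everything except the macro is mine):

  #include <stdio.h>
  #include <windows.h>

  #define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)

  int main(void) {
    volatile long counter = 41;
    long old = KMP_TEST_THEN_INC32(&counter); /* returns the old value */
    printf("old=%ld new=%ld\n", old, counter); /* prints old=41 new=42 */
    return 0;
  }
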
511 extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
512 extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
513 extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
514 extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
515 extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
516 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
517 extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
518 extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
541 __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
544 __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
547 __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
550 __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
553 __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))
560 inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
565 inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
570 inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
575 inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
580 inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
582 return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
585 inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
587 return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
590 inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
595 inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
600 inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
613 _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv), \
618 _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
622 inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
623 kmp_int64 tmp = _InterlockedExchange64((volatile kmp_int64 *)p, *(kmp_int64 *)&v);
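
Note the argument order in the _InterlockedCompareExchange* mappings above: the Win32 intrinsics take (destination, exchange, comparand), so sv precedes cv, which is the reverse of the __sync builtins used later in this file. A Windows-only sketch of the boolean CAS this yields (the function name is mine):

  #include <intrin.h>

  /* nonzero iff *p still held cv and was atomically replaced by sv */
  static int cas64_sketch(volatile __int64 *p, __int64 cv, __int64 sv) {
    return _InterlockedCompareExchange64(p, sv, cv) == cv;
  }
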
630 extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
632 extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
634 extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
636 extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
638 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
640 extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
642 extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
644 extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
646 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
649 extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
650 extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
651 extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
652 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
653 extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
654 extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
682 __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
685 __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
688 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
691 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
696 __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
700 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
709 __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
713 __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
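
A boolean acquire-CAS such as KMP_COMPARE_AND_STORE_ACQ32 is the classic building block for a test-and-test-and-set spin acquire. A self-contained sketch, with the macro re-expressed through the same GCC builtin this file uses elsewhere, and lock values that are purely illustrative:

  #include <stdint.h>
  typedef int32_t kmp_int32;

  #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                             \
    __sync_bool_compare_and_swap((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                 (kmp_int32)(sv))

  enum { LOCK_FREE = 0, LOCK_BUSY = 1 }; /* illustrative values */

  static void spin_acquire(volatile kmp_int32 *lck) {
    while (!KMP_COMPARE_AND_STORE_ACQ32(lck, LOCK_FREE, LOCK_BUSY)) {
      while (*lck != LOCK_FREE) { /* read-only spin keeps the line shared */
      }
    }
  }
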
725 __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
727 __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
730 __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
732 __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
735 __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
737 __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
740 __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
742 __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
745 __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
747 __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
749 __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
751 __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
754 __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
756 __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
758 __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
760 __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
763 __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
765 __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
767 __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
769 __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
772 __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
776 __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
780 __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
782 __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
784 __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
786 __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
789 __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
792 __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
796 __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
798 __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
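
Because the fetch-and-or/and mappings return the previous bits, a thread can set a flag and simultaneously learn whether it was already set. Sketch (the flag bit and names are hypothetical):

  #include <stdint.h>

  #define KMP_TEST_THEN_OR32(p, v) \
    __sync_fetch_and_or((volatile uint32_t *)(p), (uint32_t)(v))

  enum { DONE_BIT = 1 }; /* hypothetical flag bit */

  /* returns nonzero if another thread had already set DONE_BIT */
  static int mark_done(volatile uint32_t *flags) {
    uint32_t old = KMP_TEST_THEN_OR32(flags, DONE_BIT);
    return (old & DONE_BIT) != 0;
  }
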
802 __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
805 __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
808 __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
811 __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
814 __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
817 __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
820 __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
824 __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
827 __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
830 __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
833 static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
839 static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
847 mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
850 mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
853 mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
857 __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
860 __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
863 __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
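
One plausible body for the MIPS helpers declared at 833/839, assuming a target that lacks the 64-bit __sync builtins but has the __atomic interface; this is a sketch consistent with the declarations, not necessarily the runtime's verbatim code:

  #include <stdbool.h>
  #include <stdint.h>
  typedef uint64_t kmp_uint64;

  static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                     kmp_uint64 cv,
                                                     kmp_uint64 sv) {
    return __atomic_compare_exchange_n(p, &cv, sv, /*weak=*/false,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  }

  static inline kmp_uint64 mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                          kmp_uint64 cv,
                                                          kmp_uint64 sv) {
    /* on failure the builtin writes the observed value into cv, so
       returning cv matches __sync_val_* semantics in both cases */
    __atomic_compare_exchange_n(p, &cv, sv, false, __ATOMIC_SEQ_CST,
                                __ATOMIC_SEQ_CST);
    return cv;
  }
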
869 __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v), \
873 __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
876 __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
878 __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
880 __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
882 inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
883 volatile kmp_uint32 *up;
893 inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
894 volatile kmp_uint64 *up;
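
The KMP_XCHG_REAL32/KMP_XCHG_REAL64 bodies at 882/893 exchange a float's bit pattern through a same-width unsigned integer, as the volatile kmp_uint32/kmp_uint64 locals above suggest. A portable sketch of that pattern, with memcpy standing in for whatever punning the runtime does and names that are mine:

  #include <stdint.h>
  #include <string.h>

  static inline float xchg_real32_sketch(volatile float *p, float v) {
    volatile uint32_t *up = (volatile uint32_t *)p;  /* same width, address */
    uint32_t uv;
    memcpy(&uv, &v, sizeof uv);                      /* float bits -> u32 */
    uint32_t old = __sync_lock_test_and_set(up, uv); /* returns previous bits */
    float out;
    memcpy(&out, &old, sizeof out);                  /* old bits -> float */
    return out;
  }
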
906 extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
907 extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
908 extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
909 extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
910 extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
911 extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
912 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
913 extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
914 extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
916 extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
918 extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
920 extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
922 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
924 extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
926 extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
928 extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
930 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
933 extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
934 extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
935 extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
936 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
937 extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
938 extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
941 __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
943 __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
945 __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
947 __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
949 __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
951 __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
953 __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
955 __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
957 __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
959 __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
961 __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
963 __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
965 __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
967 __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
969 __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))
972 __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
974 __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
976 __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
978 __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
980 __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
982 __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
985 __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
988 __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
991 __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
994 __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
997 __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
1000 __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
1003 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1006 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1011 __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
1015 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1024 __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
1027 __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1031 __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
1124 // #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
1125 // #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
1127 // #define TCR_8(a) (*(volatile kmp_int64 *)(a))
1128 // #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
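
The commented-out definitions at 1124-1128 show the volatile-cast form of the TCR/TCW (thread-consistent read/write) helpers: routing an access through a volatile lvalue keeps the compiler from caching the variable in a register across iterations. A sketch of a TCR_4-style polling read (my variant; note the commented-out TCW_4 above casts the source operand instead):

  #include <stdint.h>
  typedef int32_t kmp_int32;

  #define TCR_4(a) (*(volatile kmp_int32 *)(&(a))) /* forced re-load */

  static void wait_until_set(kmp_int32 *flag) {
    while (TCR_4(*flag) == 0) { /* re-reads memory on every iteration */
    }
  }
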
1146 KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
1151 KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
1186 #define VOLATILE_CAST(x) (volatile x)
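
VOLATILE_CAST simply splices the volatile qualifier into a cast's target type. Minimal illustration (the function is mine):

  #define VOLATILE_CAST(x) (volatile x)

  static int read_once(int *p) {
    return *VOLATILE_CAST(int *)(p); /* expands to *(volatile int *)(p) */
  }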