Lines matching refs:BITS
Cross-reference hits for the BITS macro argument in the OpenMP runtime's atomic-operation macros (the macro set matches LLVM's kmp_atomic.cpp). The left-hand numbers are line numbers in that source file; gaps between entries are simply lines that did not mention BITS.

790 #define OP_CMPXCHG(TYPE, BITS, OP) \
795     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
796         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
797         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
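
OP_CMPXCHG is the workhorse behind most of the entries below: read the old value, compute old OP rhs, and loop on KMP_COMPARE_AND_STORE_ACQ##BITS until the swap lands (the listing shows only the lines that mention BITS). A minimal sketch of the same pattern in C11 atomics, assuming a 32-bit integer and "+" for OP; the function name is mine, and the width-specific primitive the runtime selects by token-pasting BITS is replaced by a generic compare-exchange:

#include <stdatomic.h>
#include <stdint.h>

/* CAS retry loop in the OP_CMPXCHG shape, for lhs += rhs on 32 bits. */
static void atomic_add_int32(_Atomic int32_t *lhs, int32_t rhs) {
  int32_t old_value = atomic_load_explicit(lhs, memory_order_relaxed);
  int32_t new_value = old_value + rhs; /* old_value OP rhs */
  /* On failure the compare-exchange reloads old_value; recompute and retry. */
  while (!atomic_compare_exchange_weak_explicit(
      lhs, &old_value, new_value, memory_order_acquire, memory_order_relaxed)) {
    new_value = old_value + rhs;
  }
}
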
812 #define OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
816       kmp_int##BITS *vvv; \
819     old_value.vvv = (kmp_int##BITS *)&old_value.cmp; \
820     new_value.vvv = (kmp_int##BITS *)&new_value.cmp; \
821     *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
823     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
824         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
825         *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv)) { \
828       *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
839 #define OP_CMPXCHG(TYPE, BITS, OP) \
843       kmp_int##BITS *vvv; \
846     old_value.vvv = (kmp_int##BITS *)&old_value.cmp; \
847     new_value.vvv = (kmp_int##BITS *)&new_value.cmp; \
848     *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
850     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
851         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
852         *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv)) { \
855       *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
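
Both fragments above (OP_CMPXCHG_WORKAROUND at 812 and the alternate OP_CMPXCHG at 839) share the same trick: vvv is an integer-typed pointer aliased onto the struct's own TYPE-typed cmp member, so the CAS operands are produced by kmp_int##BITS loads instead of casting a TYPE lvalue directly; the second definition appears to install the workaround body as the default OP_CMPXCHG in builds that always need it. A sketch of the aliasing for a 32-bit float, using the GCC/Clang __sync builtin as a stand-in for KMP_COMPARE_AND_STORE_ACQ32 (all names mine):

#include <stdint.h>

/* The payload lives in .cmp; .vvv points at the same storage with integer
   type, so reads and compares go through int32_t rather than a cast float. */
static void atomic_add_float(volatile float *lhs, float rhs) {
  struct _sss {
    float cmp;
    int32_t *vvv;
  } old_value, new_value;
  old_value.vvv = (int32_t *)&old_value.cmp;
  new_value.vvv = (int32_t *)&new_value.cmp;
  *old_value.vvv = *(volatile int32_t *)lhs; /* integer load of *lhs */
  new_value.cmp = old_value.cmp + rhs;       /* old_value OP rhs */
  while (!__sync_bool_compare_and_swap((volatile int32_t *)lhs,
                                       *old_value.vvv, *new_value.vvv)) {
    *old_value.vvv = *(volatile int32_t *)lhs; /* reload and retry */
    new_value.cmp = old_value.cmp + rhs;
  }
}
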
872 #define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
877   KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
880 #define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
884   OP_CMPXCHG(TYPE, BITS, OP) \
889 #define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, \
893   OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
901 #define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
907   KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
915 #define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
920   OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */ \
930 #define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, \
935   OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */ \
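
Each wrapper in this group is defined twice because the file compiles different bodies per architecture: the first set issues the lock-free operation unconditionally, while the second set (the definitions carrying "aligned address" comments) takes the lock-free path only when the low bits of lhs pass the MASK alignment test, and otherwise falls back to a critical section. A sketch of that dispatch under my assumptions (0x7 as the 64-bit mask, a C11 spin lock standing in for the runtime's named locks):

#include <stdatomic.h>
#include <stdint.h>

static atomic_flag fallback_lock = ATOMIC_FLAG_INIT;

/* Lock-free fetch-add when aligned, critical section otherwise. */
static void atomic_add_fixed8(int64_t *lhs, int64_t rhs) {
  if (!((uintptr_t)lhs & 0x7)) { /* 8-byte aligned: KMP_TEST_THEN_ADD64 path */
    __sync_fetch_and_add(lhs, rhs);
  } else {                       /* unaligned: serialize through a lock */
    while (atomic_flag_test_and_set_explicit(&fallback_lock,
                                             memory_order_acquire))
      ;
    *lhs += rhs;
    atomic_flag_clear_explicit(&fallback_lock, memory_order_release);
  }
}
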
1089 #define ATOMIC_CMPX_L(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG) \
1092   OP_CMPXCHG(TYPE, BITS, OP) \
1098 #define ATOMIC_CMPX_L(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG) \
1102   OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */ \
1157 #define MIN_MAX_CMPXCHG(TYPE, BITS, OP) \
1164            !KMP_COMPARE_AND_STORE_ACQ##BITS( \
1165                (kmp_int##BITS *)lhs, \
1166                *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
1167                *VOLATILE_CAST(kmp_int##BITS *) & rhs)) { \
1187 #define MIN_MAX_COMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1192   MIN_MAX_CMPXCHG(TYPE, BITS, OP) \
1199 #define MIN_MAX_COMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1205   MIN_MAX_CMPXCHG(TYPE, BITS, OP) /* aligned address */ \
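
The min/max loop differs from OP_CMPXCHG in a detail visible even in these fragments: the CAS's desired value is rhs itself (note the "& rhs" operand at 1167) rather than a computed new_value, and the surrounding while-condition re-checks whether rhs still improves on the stored value, so the loop exits early once another thread installs something at least as good. A C11 sketch for max (function name mine):

#include <stdatomic.h>
#include <stdint.h>

/* Install rhs only while it still beats the current value. */
static void atomic_max_int32(_Atomic int32_t *lhs, int32_t rhs) {
  int32_t old_value = atomic_load_explicit(lhs, memory_order_relaxed);
  while (old_value < rhs && /* update still needed? */
         !atomic_compare_exchange_weak_explicit(
             lhs, &old_value, rhs, memory_order_acquire,
             memory_order_relaxed)) {
    /* failed CAS refreshed old_value; the guard re-tests it */
  }
}
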
1269 #define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1273   OP_CMPXCHG(TYPE, BITS, OP) \
1279 #define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1284   OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */ \
1456 #define OP_CMPXCHG_REV(TYPE, BITS, OP) \
1463     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
1464         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
1465         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
1475 #define ATOMIC_CMPXCHG_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, GOMP_FLAG) \
1478   OP_CMPXCHG_REV(TYPE, BITS, OP) \
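
The _REV variants run the same CAS loop with the operands reversed, computing rhs OP old_value for the OpenMP reversed forms such as x = expr - x, where the operation is not commutative. A sketch (name mine):

#include <stdatomic.h>
#include <stdint.h>

/* Reversed-operand CAS loop: x = rhs - x. */
static void atomic_sub_rev_int32(_Atomic int32_t *lhs, int32_t rhs) {
  int32_t old_value = atomic_load_explicit(lhs, memory_order_relaxed);
  int32_t new_value = rhs - old_value; /* rhs OP old_value */
  while (!atomic_compare_exchange_weak_explicit(
      lhs, &old_value, new_value, memory_order_acquire, memory_order_relaxed)) {
    new_value = rhs - old_value;
  }
}
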
1650 #define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1654   OP_CMPXCHG(TYPE, BITS, OP) \
1660 #define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1665   OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */ \
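
The _MIX wrappers handle a statement whose right-hand side has a different (typically wider) type than the target: the arithmetic is done in RTYPE and converted back to TYPE before the CAS, which still operates at the width of lhs. A sketch with float lhs and double rhs, punning through a union instead of the runtime's VOLATILE_CAST (names mine):

#include <stdint.h>

/* Compute in double (the RTYPE), store back as float at 32-bit width. */
static void atomic_add_float_double(volatile float *lhs, double rhs) {
  union pun { float f; int32_t i; } old_value, new_value;
  old_value.i = *(volatile int32_t *)lhs;
  new_value.f = (float)((double)old_value.f + rhs);
  while (!__sync_bool_compare_and_swap((volatile int32_t *)lhs,
                                       old_value.i, new_value.i)) {
    old_value.i = *(volatile int32_t *)lhs;
    new_value.f = (float)((double)old_value.f + rhs);
  }
}
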
1876 #define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1880   OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
1884 #define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1888   OP_CMPXCHG(TYPE, BITS, OP) \
1894 #define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1899   OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */ \
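
Three definitions of ATOMIC_CMPXCHG_CMPLX coexist under different build conditions: one routed through the OP_CMPXCHG_WORKAROUND body, one through plain OP_CMPXCHG, and one with the aligned-address check. The underlying idea is that a small complex value fits in one CAS-able unit, e.g. a single-precision complex is 64 bits and can be swapped as one integer. A sketch of that (names mine):

#include <stdint.h>

/* A float _Complex occupies 64 bits, so the whole value is CASed as one
   int64_t-sized unit; union punning stands in for VOLATILE_CAST. */
static void atomic_add_cmplx4(volatile float _Complex *lhs, float _Complex rhs) {
  union pun { float _Complex c; int64_t i; } old_value, new_value;
  old_value.i = *(volatile int64_t *)lhs;
  new_value.c = old_value.c + rhs;
  while (!__sync_bool_compare_and_swap((volatile int64_t *)lhs,
                                       old_value.i, new_value.i)) {
    old_value.i = *(volatile int64_t *)lhs;
    new_value.c = old_value.c + rhs;
  }
}
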
1943 #define OP_CMPXCHG_READ(TYPE, BITS, OP) \
1948       kmp_int##BITS i_val; \
1953     old_value.i_val = KMP_COMPARE_AND_STORE_RET##BITS( \
1954         (kmp_int##BITS *)loc, \
1955         *VOLATILE_CAST(kmp_int##BITS *) & old_value.i_val, \
1956         *VOLATILE_CAST(kmp_int##BITS *) & old_value.i_val); \
1987 #define ATOMIC_FIXED_READ(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
1991   new_value = KMP_TEST_THEN_ADD##BITS(loc, OP 0); \
1995 #define ATOMIC_CMPXCHG_READ(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
1999   OP_CMPXCHG_READ(TYPE, BITS, OP) \
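
Two atomic-read tricks appear here. ATOMIC_FIXED_READ reads an integer by atomically adding zero (1991). OP_CMPXCHG_READ reads a floating value by issuing a compare-and-swap whose compare and exchange operands are both the same guess (1955-1956): whether the compare hits or misses, memory is unchanged, and the RET-style primitive returns the value it actually observed. Sketches of both, with __sync builtins standing in for KMP_TEST_THEN_ADD64 and KMP_COMPARE_AND_STORE_RET64 (names mine):

#include <stdint.h>

/* Integer read via fetch-add of zero. */
static int64_t atomic_read_int64(volatile int64_t *loc) {
  return __sync_fetch_and_add(loc, 0);
}

/* Floating read via a no-op CAS that reports the observed value. */
static double atomic_read_double(volatile double *loc) {
  union pun { double f_val; int64_t i_val; } old_value;
  old_value.i_val = *(volatile int64_t *)loc; /* first guess, may be stale */
  old_value.i_val = __sync_val_compare_and_swap(
      (volatile int64_t *)loc, old_value.i_val, old_value.i_val);
  return old_value.f_val;
}
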
2102 #define ATOMIC_XCHG_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2105   KMP_XCHG_FIXED##BITS(lhs, rhs); \
2108 #define ATOMIC_XCHG_FLOAT_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2111   KMP_XCHG_REAL##BITS(lhs, rhs); \
2121 #define OP_CMPXCHG_WR(TYPE, BITS, OP) \
2128     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2129         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2130         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2138 #define ATOMIC_CMPXCHG_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2141   OP_CMPXCHG_WR(TYPE, BITS, OP) \
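
Atomic writes come in the same two flavors: a native exchange whose result is discarded (the XCHG_WR macros), and a CAS loop that keeps proposing rhs until it lands (OP_CMPXCHG_WR), for types without a fixed- or real-width exchange. C11 sketches (names mine):

#include <stdatomic.h>
#include <stdint.h>

/* Write via exchange; the displaced old value is ignored. */
static void atomic_write_xchg(_Atomic int32_t *lhs, int32_t rhs) {
  (void)atomic_exchange_explicit(lhs, rhs, memory_order_acquire);
}

/* Write via CAS retry loop. */
static void atomic_write_cas(_Atomic int64_t *lhs, int64_t rhs) {
  int64_t old_value = atomic_load_explicit(lhs, memory_order_relaxed);
  while (!atomic_compare_exchange_weak_explicit(
      lhs, &old_value, rhs, memory_order_acquire, memory_order_relaxed)) {
    /* old_value refreshed on failure; just retry installing rhs */
  }
}
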
2269 #define OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2276     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2277         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2278         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2290 #define ATOMIC_CMPXCHG_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2295   OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2299 #define ATOMIC_FIXED_ADD_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2305   old_value = KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
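
The _CPT (capture) routines run the usual update loop but also hand a value back, for OpenMP capture forms like v = x += e. ATOMIC_FIXED_ADD_CPT gets this almost for free: the fetch-add already returns the old value (2305), and the new one is just old OP rhs. A sketch, assuming a flag convention in which nonzero returns the post-update value and zero the pre-update one (that convention and the names are my reading, not shown in these fragments):

#include <stdatomic.h>
#include <stdint.h>

/* Capture add: update *lhs and return the old or new value per 'flag'. */
static int32_t atomic_add_cpt_int32(_Atomic int32_t *lhs, int32_t rhs,
                                    int flag) {
  int32_t old_value = atomic_fetch_add_explicit(lhs, rhs, memory_order_acquire);
  return flag ? old_value + rhs : old_value;
}
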
2450 #define ATOMIC_CMPXCHG_CPT_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
2456   OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2602 #define ATOMIC_CMPX_L_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2607   OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2664 #define MIN_MAX_CMPXCHG_CPT(TYPE, BITS, OP) \
2671            !KMP_COMPARE_AND_STORE_ACQ##BITS( \
2672                (kmp_int##BITS *)lhs, \
2673                *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2674                *VOLATILE_CAST(kmp_int##BITS *) & rhs)) { \
2696 #define MIN_MAX_COMPXCHG_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2702   MIN_MAX_CMPXCHG_CPT(TYPE, BITS, OP) \
2759 #define ATOMIC_CMPX_EQV_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2764   OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2966 #define OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
2973     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2974         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2975         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2987 #define ATOMIC_CMPXCHG_CPT_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2992   OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
3163 #define ATOMIC_CMPXCHG_CPT_REV_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
3169   OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
3273 #define ATOMIC_XCHG_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3277   old_value = KMP_XCHG_FIXED##BITS(lhs, rhs); \
3281 #define ATOMIC_XCHG_FLOAT_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3285   old_value = KMP_XCHG_REAL##BITS(lhs, rhs); \
3290 #define CMPXCHG_SWP(TYPE, BITS) \
3297     while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
3298         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
3299         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
3308 #define ATOMIC_CMPXCHG_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3313   CMPXCHG_SWP(TYPE, BITS) \
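
The _SWP (swap) routines implement the OpenMP capture-swap form { v = x; x = e; }: store rhs and return the previous contents. The XCHG macros use a native exchange at fixed or real width (3277, 3285); CMPXCHG_SWP emulates one with the familiar CAS retry loop for widths that lack an exchange primitive. C11 sketches (names mine):

#include <stdatomic.h>
#include <stdint.h>

/* Swap via native exchange. */
static int64_t atomic_swap_xchg(_Atomic int64_t *lhs, int64_t rhs) {
  return atomic_exchange_explicit(lhs, rhs, memory_order_acquire);
}

/* Swap emulated with a CAS retry loop. */
static int64_t atomic_swap_cas(_Atomic int64_t *lhs, int64_t rhs) {
  int64_t old_value = atomic_load_explicit(lhs, memory_order_relaxed);
  while (!atomic_compare_exchange_weak_explicit(
      lhs, &old_value, rhs, memory_order_acquire, memory_order_relaxed)) {
    /* old_value refreshed on failure; retry installing rhs */
  }
  return old_value;
}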