Lines Matching refs:lhs
609 static inline Quad_a4_t operator+(Quad_a4_t &lhs, Quad_a4_t &rhs) { in operator +() argument
610 return lhs.q + rhs.q; in operator +()
612 static inline Quad_a4_t operator-(Quad_a4_t &lhs, Quad_a4_t &rhs) { in operator -() argument
613 return lhs.q - rhs.q; in operator -()
615 static inline Quad_a4_t operator*(Quad_a4_t &lhs, Quad_a4_t &rhs) { in operator *() argument
616 return lhs.q * rhs.q; in operator *()
618 static inline Quad_a4_t operator/(Quad_a4_t &lhs, Quad_a4_t &rhs) { in operator /() argument
619 return lhs.q / rhs.q; in operator /()
621 static inline bool operator<(Quad_a4_t &lhs, Quad_a4_t &rhs) { in operator <() argument
622 return lhs.q < rhs.q; in operator <()
624 static inline bool operator>(Quad_a4_t &lhs, Quad_a4_t &rhs) { in operator >() argument
625 return lhs.q > rhs.q; in operator >()
628 static inline Quad_a16_t operator+(Quad_a16_t &lhs, Quad_a16_t &rhs) { in operator +() argument
629 return lhs.q + rhs.q; in operator +()
631 static inline Quad_a16_t operator-(Quad_a16_t &lhs, Quad_a16_t &rhs) { in operator -() argument
632 return lhs.q - rhs.q; in operator -()
634 static inline Quad_a16_t operator*(Quad_a16_t &lhs, Quad_a16_t &rhs) { in operator *() argument
635 return lhs.q * rhs.q; in operator *()
637 static inline Quad_a16_t operator/(Quad_a16_t &lhs, Quad_a16_t &rhs) { in operator /() argument
638 return lhs.q / rhs.q; in operator /()
640 static inline bool operator<(Quad_a16_t &lhs, Quad_a16_t &rhs) { in operator <() argument
641 return lhs.q < rhs.q; in operator <()
643 static inline bool operator>(Quad_a16_t &lhs, Quad_a16_t &rhs) { in operator >() argument
644 return lhs.q > rhs.q; in operator >()
647 static inline kmp_cmplx128_a4_t operator+(kmp_cmplx128_a4_t &lhs, in operator +() argument
649 return lhs.q + rhs.q; in operator +()
651 static inline kmp_cmplx128_a4_t operator-(kmp_cmplx128_a4_t &lhs, in operator -() argument
653 return lhs.q - rhs.q; in operator -()
655 static inline kmp_cmplx128_a4_t operator*(kmp_cmplx128_a4_t &lhs, in operator *() argument
657 return lhs.q * rhs.q; in operator *()
659 static inline kmp_cmplx128_a4_t operator/(kmp_cmplx128_a4_t &lhs, in operator /() argument
661 return lhs.q / rhs.q; in operator /()
664 static inline kmp_cmplx128_a16_t operator+(kmp_cmplx128_a16_t &lhs, in operator +() argument
666 return lhs.q + rhs.q; in operator +()
668 static inline kmp_cmplx128_a16_t operator-(kmp_cmplx128_a16_t &lhs, in operator -() argument
670 return lhs.q - rhs.q; in operator -()
672 static inline kmp_cmplx128_a16_t operator*(kmp_cmplx128_a16_t &lhs, in operator *() argument
674 return lhs.q * rhs.q; in operator *()
676 static inline kmp_cmplx128_a16_t operator/(kmp_cmplx128_a16_t &lhs, in operator /() argument
678 return lhs.q / rhs.q; in operator /()
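The matches above (source lines 609-678) are the operator wrappers that let the atomic macros further down apply +, -, *, /, < and > directly to the 4-byte- and 16-byte-aligned quad and complex-128 wrapper types: each one unwraps the .q member of both operands, applies the native operation, and converts the result back. A minimal compilable sketch of that pattern, reassembled from the matched lines; the stand-in struct and the long double typedef are assumptions for illustration only, since the real Quad_a4_t is declared elsewhere in the runtime with a 128-bit quad member and an explicit alignment attribute:

    // Stand-in for the runtime's 128-bit quad type (assumption: long double
    // substitutes for the real quad type so the sketch compiles anywhere).
    typedef long double quad_stub_t;

    // Stand-in for the aligned wrapper type; the real declaration also carries
    // an alignment attribute, which is what the _a4/_a16 suffixes refer to.
    struct Quad_a4_t {
      quad_stub_t q;
      Quad_a4_t() : q() {}
      Quad_a4_t(const quad_stub_t &cq) : q(cq) {}
    };

    // Wrapper reassembled from matched source lines 609-610: unwrap both
    // operands, add the raw values, convert the result back to the wrapper.
    static inline Quad_a4_t operator+(Quad_a4_t &lhs, Quad_a4_t &rhs) {
      return lhs.q + rhs.q;
    }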
700 TYPE *lhs, TYPE rhs) { \
729 (*lhs) OP(rhs); \
735 (*lhs) = (TYPE)((*lhs)OP rhs); \
793 old_value = *(TYPE volatile *)lhs; \
796 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
800 old_value = *(TYPE volatile *)lhs; \
821 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
824 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
828 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
848 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
851 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
855 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
863 (*lhs) = (*lhs)OP rhs; \
877 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
905 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
907 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
919 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
934 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1081 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1082 OP_CRITICAL(= *lhs OP, LCK_ID) \
1091 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1100 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1101 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1105 OP_CRITICAL(= *lhs OP, LCK_ID) /* unaligned - use critical */ \
1139 if (*lhs OP rhs) { /* still need actions? */ \
1140 *lhs = rhs; \
1161 temp_val = *lhs; \
1165 (kmp_int##BITS *)lhs, \
1168 temp_val = *lhs; \
1177 if (*lhs OP rhs) { /* need actions? */ \
1190 if (*lhs OP rhs) { \
1202 if (*lhs OP rhs) { \
1204 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1283 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1422 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
1445 TYPE *lhs, TYPE rhs) { \
1460 temp_val = *lhs; \
1464 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
1468 temp_val = *lhs; \
1632 ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs) { \
1664 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1898 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
2105 KMP_XCHG_FIXED##BITS(lhs, rhs); \
2111 KMP_XCHG_REAL##BITS(lhs, rhs); \
2125 temp_val = *lhs; \
2129 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2131 temp_val = *lhs; \
2213 TYPE *lhs, TYPE rhs, int flag) { \
2227 (*lhs) OP rhs; \
2228 new_value = (*lhs); \
2230 new_value = (*lhs); \
2231 (*lhs) OP rhs; \
2241 (*lhs) = (TYPE)((*lhs)OP rhs); \
2242 new_value = (*lhs); \
2244 new_value = (*lhs); \
2245 (*lhs) = (TYPE)((*lhs)OP rhs); \
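The capture-form matches just above (source lines 2213-2245) show how an int flag selects whether the value handed back to the caller is taken after or before the update. Expanded by hand for plain int addition, the pattern looks roughly like the sketch below; the function name is hypothetical, and the synchronization that the real macro-generated routines wrap around this body (critical section or compare-and-swap, as in the other matches) is omitted:

    // Hand expansion of the flag-controlled capture pattern seen at source
    // lines 2227-2231; sketch only, no locking shown.
    static int capture_add_sketch(int *lhs, int rhs, int flag) {
      int new_value;
      if (flag) {
        (*lhs) += rhs;      // update first ...
        new_value = (*lhs); // ... and capture the post-update value
      } else {
        new_value = (*lhs); // capture the pre-update value ...
        (*lhs) += rhs;      // ... then update
      }
      return new_value;
    }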
2273 temp_val = *lhs; \
2277 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2279 temp_val = *lhs; \
2305 old_value = KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
2443 ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs, int flag) { \
2580 (*lhs) = new_value; \
2582 new_value = (*lhs); \
2583 (*lhs) OP rhs; \
2606 OP_GOMP_CRITICAL_L_CPT(= *lhs OP, GOMP_FLAG) \
2639 if (*lhs OP rhs) { /* still need actions? */ \
2640 old_value = *lhs; \
2641 *lhs = rhs; \
2647 new_value = *lhs; \
2668 temp_val = *lhs; \
2672 (kmp_int##BITS *)lhs, \
2675 temp_val = *lhs; \
2689 if (*lhs OP rhs) { /* need actions? */ \
2693 return *lhs; \
2700 if (*lhs OP rhs) { \
2704 return *lhs; \
2806 (*lhs) OP rhs; \
2807 (*out) = (*lhs); \
2809 (*out) = (*lhs); \
2810 (*lhs) OP rhs; \
2829 void __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, TYPE *lhs, \
2939 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
2940 new_value = (*lhs); \
2942 new_value = (*lhs); \
2943 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
2970 temp_val = *lhs; \
2974 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2976 temp_val = *lhs; \
3096 (*lhs) = (rhs)OP(*lhs); \
3097 (*out) = (*lhs); \
3099 (*out) = (*lhs); \
3100 (*lhs) = (rhs)OP(*lhs); \
3248 TYPE __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3256 old_value = (*lhs); \
3257 (*lhs) = rhs; \
3277 old_value = KMP_XCHG_FIXED##BITS(lhs, rhs); \
3285 old_value = KMP_XCHG_REAL##BITS(lhs, rhs); \
3294 temp_val = *lhs; \
3298 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
3300 temp_val = *lhs; \
3350 void __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3358 tmp = (*lhs); \
3359 (*lhs) = (rhs); \
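The _swp matches (source lines 3248-3359) implement swap-and-capture: store rhs into *lhs and hand the previous value back, either through the KMP_XCHG_FIXED##BITS / KMP_XCHG_REAL##BITS primitives for word-sized types or, as in the tmp/old_value lines above, inside a critical section. The unsynchronized core of that body, written out for a 4-byte integer under a hypothetical name:

    #include <stdint.h>

    // Core of the swap pattern at source lines 3256-3257 and 3358-3359; the
    // real routines protect this with a lock or use an XCHG primitive instead.
    static int32_t swap_sketch(int32_t *lhs, int32_t rhs) {
      int32_t old_value = *lhs; // capture the value being replaced
      *lhs = rhs;               // overwrite it with the new value
      return old_value;         // caller receives the pre-swap value
    }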
3415 void __kmpc_atomic_1(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_1() argument
3428 old_value = *(kmp_int8 *)lhs; in __kmpc_atomic_1()
3432 while (!KMP_COMPARE_AND_STORE_ACQ8((kmp_int8 *)lhs, *(kmp_int8 *)&old_value, in __kmpc_atomic_1()
3436 old_value = *(kmp_int8 *)lhs; in __kmpc_atomic_1()
3451 (*f)(lhs, lhs, rhs); in __kmpc_atomic_1()
3462 void __kmpc_atomic_2(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_2() argument
3470 !((kmp_uintptr_t)lhs & 0x1) /* make sure address is 2-byte aligned */ in __kmpc_atomic_2()
3475 old_value = *(kmp_int16 *)lhs; in __kmpc_atomic_2()
3480 (kmp_int16 *)lhs, *(kmp_int16 *)&old_value, *(kmp_int16 *)&new_value)) { in __kmpc_atomic_2()
3483 old_value = *(kmp_int16 *)lhs; in __kmpc_atomic_2()
3498 (*f)(lhs, lhs, rhs); in __kmpc_atomic_2()
3509 void __kmpc_atomic_4(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_4() argument
3519 !((kmp_uintptr_t)lhs & 0x3) /* make sure address is 4-byte aligned */ in __kmpc_atomic_4()
3524 old_value = *(kmp_int32 *)lhs; in __kmpc_atomic_4()
3529 (kmp_int32 *)lhs, *(kmp_int32 *)&old_value, *(kmp_int32 *)&new_value)) { in __kmpc_atomic_4()
3532 old_value = *(kmp_int32 *)lhs; in __kmpc_atomic_4()
3548 (*f)(lhs, lhs, rhs); in __kmpc_atomic_4()
3559 void __kmpc_atomic_8(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_8() argument
3569 !((kmp_uintptr_t)lhs & 0x7) /* make sure address is 8-byte aligned */ in __kmpc_atomic_8()
3574 old_value = *(kmp_int64 *)lhs; in __kmpc_atomic_8()
3578 (kmp_int64 *)lhs, *(kmp_int64 *)&old_value, *(kmp_int64 *)&new_value)) { in __kmpc_atomic_8()
3581 old_value = *(kmp_int64 *)lhs; in __kmpc_atomic_8()
3597 (*f)(lhs, lhs, rhs); in __kmpc_atomic_8()
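The __kmpc_atomic_1/2/4/8 matches above (source lines 3415-3597) all follow the same shape: if lhs is suitably aligned, read the current value, have the caller-supplied callback compute a candidate, and retry a compare-and-store until no other thread has modified *lhs in between; otherwise fall back to the (*f)(lhs, lhs, rhs) call under a lock. A stand-alone sketch of the lock-free path for the 4-byte case, using the GCC/Clang __sync builtin in place of the runtime's KMP_COMPARE_AND_STORE_ACQ32 macro; the assumption that the callback computes *dest = *op1 OP *op2 is inferred from the (*f)(lhs, lhs, rhs) fallback call matched above, and the bit reinterpretation the real code does for 4-byte floating types is left out:

    #include <stdint.h>

    // Sketch of the aligned, lock-free path of __kmpc_atomic_4
    // (matched source lines 3509-3548).
    static void atomic_4_cas_sketch(void *lhs, void *rhs,
                                    void (*f)(void *, void *, void *)) {
      int32_t old_value, new_value;
      old_value = *(int32_t *)lhs;       // snapshot the current value
      (*f)(&new_value, &old_value, rhs); // candidate = old OP rhs
      // Retry until the store succeeds with no interference from other threads.
      while (!__sync_bool_compare_and_swap((int32_t *)lhs, old_value,
                                           new_value)) {
        old_value = *(int32_t *)lhs;     // reload and recompute the candidate
        (*f)(&new_value, &old_value, rhs);
      }
    }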
3608 void __kmpc_atomic_10(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_10() argument
3619 (*f)(lhs, lhs, rhs); in __kmpc_atomic_10()
3630 void __kmpc_atomic_16(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_16() argument
3641 (*f)(lhs, lhs, rhs); in __kmpc_atomic_16()
3651 void __kmpc_atomic_20(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_20() argument
3662 (*f)(lhs, lhs, rhs); in __kmpc_atomic_20()
3672 void __kmpc_atomic_32(ident_t *id_ref, int gtid, void *lhs, void *rhs, in __kmpc_atomic_32() argument
3683 (*f)(lhs, lhs, rhs); in __kmpc_atomic_32()
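For the sizes with no native compare-and-swap (the 10-, 16-, 20- and 32-byte routines at source lines 3608-3683), only the entry-point signature and the callback invocation (*f)(lhs, lhs, rhs) reference lhs, so the lock acquire and release around that call do not show up in this listing. The shape of that fallback, with a std::mutex standing in for the runtime's own per-size atomic lock (an assumption made purely so the sketch is self-contained):

    #include <mutex>

    // Stand-in for the runtime's per-size atomic lock (illustration only).
    static std::mutex atomic_fallback_lock_sketch;

    // Fallback shape of __kmpc_atomic_10/16/20/32: serialize the update and
    // let the caller-supplied callback perform *lhs = *lhs OP *rhs in place,
    // which is the (*f)(lhs, lhs, rhs) call matched above.
    static void atomic_fallback_sketch(void *lhs, void *rhs,
                                       void (*f)(void *, void *, void *)) {
      std::lock_guard<std::mutex> guard(atomic_fallback_lock_sketch);
      (*f)(lhs, lhs, rhs);
    }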