Lines Matching refs:lck

1057 ompt_wait_id_t lck; in __kmpc_ordered() local
1062 lck = (ompt_wait_id_t)(uintptr_t)&team->t.t_ordered.dt.t_value; in __kmpc_ordered()
1064 th->th.ompt_thread_info.wait_id = lck; in __kmpc_ordered()
1071 ompt_mutex_ordered, omp_lock_hint_none, kmp_mutex_impl_spin, lck, in __kmpc_ordered()
1091 ompt_mutex_ordered, (ompt_wait_id_t)(uintptr_t)lck, codeptr_ra); in __kmpc_ordered()
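
Context for the __kmpc_ordered references above: lck here is only an OMPT wait id, taken from the address of the team's ordered dispatch value and reported through the mutex-acquire/acquired callbacks. A minimal sketch of user code whose ordered construct reaches this entry point, assuming the usual Clang lowering to __kmpc_ordered / __kmpc_end_ordered:

    #include <stdio.h>

    int main(void) {
      /* each ordered region body is expected to be bracketed by
         __kmpc_ordered / __kmpc_end_ordered in the generated code */
      #pragma omp parallel for ordered
      for (int i = 0; i < 8; i++) {
        #pragma omp ordered
        printf("%d\n", i); /* executes in loop-iteration order */
      }
      return 0;
    }
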
1147 kmp_indirect_lock_t **lck; in __kmp_init_indirect_csptr() local
1148 lck = (kmp_indirect_lock_t **)crit; in __kmp_init_indirect_csptr()
1158 int status = KMP_COMPARE_AND_STORE_PTR(lck, nullptr, ilk); in __kmp_init_indirect_csptr()
1167 KMP_DEBUG_ASSERT(*lck != NULL); in __kmp_init_indirect_csptr()
1291 kmp_user_lock_p lck = (kmp_user_lock_p)TCR_PTR(*lck_pp); in __kmp_get_critical_section_ptr() local
1293 if (lck == NULL) { in __kmp_get_critical_section_ptr()
1298 lck = __kmp_user_lock_allocate(&idx, gtid, kmp_lf_critical_section); in __kmp_get_critical_section_ptr()
1299 __kmp_init_user_lock_with_checks(lck); in __kmp_get_critical_section_ptr()
1300 __kmp_set_user_lock_location(lck, loc); in __kmp_get_critical_section_ptr()
1302 __kmp_itt_critical_creating(lck); in __kmp_get_critical_section_ptr()
1313 int status = KMP_COMPARE_AND_STORE_PTR(lck_pp, 0, lck); in __kmp_get_critical_section_ptr()
1318 __kmp_itt_critical_destroyed(lck); in __kmp_get_critical_section_ptr()
1322 __kmp_destroy_user_lock_with_checks(lck); in __kmp_get_critical_section_ptr()
1323 __kmp_user_lock_free(&idx, gtid, lck); in __kmp_get_critical_section_ptr()
1324 lck = (kmp_user_lock_p)TCR_PTR(*lck_pp); in __kmp_get_critical_section_ptr()
1325 KMP_DEBUG_ASSERT(lck != NULL); in __kmp_get_critical_section_ptr()
1328 return lck; in __kmp_get_critical_section_ptr()
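
The references above show __kmp_get_critical_section_ptr lazily allocating the user lock that backs a named critical section and publishing it into the kmp_critical_name cell with KMP_COMPARE_AND_STORE_PTR; a thread that loses the race destroys and frees its own lock and adopts the winner's. A minimal sketch of that allocate-then-compare-and-store idiom in C11 atomics (lock_t, lock_alloc and lock_free are illustrative stand-ins, not runtime names):

    #include <stdatomic.h>

    typedef struct lock lock_t;      /* stand-in for kmp_user_lock_p */
    extern lock_t *lock_alloc(void); /* hypothetical helpers */
    extern void lock_free(lock_t *);

    lock_t *get_or_publish(_Atomic(lock_t *) *cell) {
      lock_t *lk = atomic_load(cell);
      if (lk == NULL) {
        lock_t *mine = lock_alloc();
        lock_t *expected = NULL;
        if (atomic_compare_exchange_strong(cell, &expected, mine)) {
          lk = mine;       /* won the race: our lock is now published */
        } else {
          lock_free(mine); /* lost: discard ours ... */
          lk = expected;   /* ... and adopt the published lock */
        }
      }
      return lk;
    }
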
1356 kmp_user_lock_p lck; in __kmpc_critical() local
1367 (sizeof(lck->tas.lk.poll) <= OMP_CRITICAL_SIZE)) { in __kmpc_critical()
1368 lck = (kmp_user_lock_p)crit; in __kmpc_critical()
1372 (sizeof(lck->futex.lk.poll) <= OMP_CRITICAL_SIZE)) { in __kmpc_critical()
1373 lck = (kmp_user_lock_p)crit; in __kmpc_critical()
1377 lck = __kmp_get_critical_section_ptr(crit, loc, global_tid); in __kmpc_critical()
1381 __kmp_push_sync(global_tid, ct_critical, loc, lck); in __kmpc_critical()
1389 __kmp_itt_critical_acquiring(lck); in __kmpc_critical()
1398 ti.wait_id = (ompt_wait_id_t)(uintptr_t)lck; in __kmpc_critical()
1406 (ompt_wait_id_t)(uintptr_t)lck, codeptr_ra); in __kmpc_critical()
1412 __kmp_acquire_user_lock_with_checks(lck, global_tid); in __kmpc_critical()
1415 __kmp_itt_critical_acquired(lck); in __kmpc_critical()
1426 ompt_mutex_critical, (ompt_wait_id_t)(uintptr_t)lck, codeptr_ra); in __kmpc_critical()
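
In the __kmpc_critical references above, small test-and-set or futex locks live directly inside the kmp_critical_name storage (the sizeof checks against OMP_CRITICAL_SIZE); anything larger is resolved through __kmp_get_critical_section_ptr, and the acquire is reported to OMPT with the lock address as wait id. Source that reaches this path, assuming the typical Clang lowering of a critical construct to __kmpc_critical / __kmpc_end_critical around the body:

    int counter = 0;

    void bump(void) {
      /* expected lowering (sketch): __kmpc_critical(&loc, gtid, &crit);
         counter++; __kmpc_end_critical(&loc, gtid, &crit); where crit is a
         compiler-generated kmp_critical_name global */
      #pragma omp critical
      counter++;
    }
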
1573 kmp_user_lock_p lck; in __kmpc_critical_with_hint() local
1603 lck = (kmp_user_lock_p)lk; in __kmpc_critical_with_hint()
1605 __kmp_push_sync(global_tid, ct_critical, loc, lck, in __kmpc_critical_with_hint()
1609 __kmp_itt_critical_acquiring(lck); in __kmpc_critical_with_hint()
1616 ti.wait_id = (ompt_wait_id_t)(uintptr_t)lck; in __kmpc_critical_with_hint()
1623 __ompt_get_mutex_impl_type(crit), (ompt_wait_id_t)(uintptr_t)lck, in __kmpc_critical_with_hint()
1630 KMP_ACQUIRE_TAS_LOCK(lck, global_tid); in __kmpc_critical_with_hint()
1634 KMP_ACQUIRE_FUTEX_LOCK(lck, global_tid); in __kmpc_critical_with_hint()
1642 lck = ilk->lock; in __kmpc_critical_with_hint()
1644 __kmp_push_sync(global_tid, ct_critical, loc, lck, in __kmpc_critical_with_hint()
1648 __kmp_itt_critical_acquiring(lck); in __kmpc_critical_with_hint()
1655 ti.wait_id = (ompt_wait_id_t)(uintptr_t)lck; in __kmpc_critical_with_hint()
1662 __ompt_get_mutex_impl_type(0, ilk), (ompt_wait_id_t)(uintptr_t)lck, in __kmpc_critical_with_hint()
1667 KMP_I_LOCK_FUNC(ilk, set)(lck, global_tid); in __kmpc_critical_with_hint()
1672 __kmp_itt_critical_acquired(lck); in __kmpc_critical_with_hint()
1683 ompt_mutex_critical, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_critical_with_hint()
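
The hinted variant above selects the lock flavor from the hint (test-and-set, futex, or an indirect lock) and reports the chosen implementation via __ompt_get_mutex_impl_type. A construct that should lower to __kmpc_critical_with_hint, assuming OpenMP 5.0 sync hints (omp_lock_hint_* in 4.5) and the usual Clang lowering:

    #include <omp.h>

    int shared_count = 0;

    void bump_contended(void) {
      /* the hint clause is what routes this to the _with_hint entry point */
      #pragma omp critical(ctr) hint(omp_sync_hint_contended)
      shared_count++;
    }
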
1705 kmp_user_lock_p lck; in __kmpc_end_critical() local
1712 lck = (kmp_user_lock_p)crit; in __kmpc_end_critical()
1713 KMP_ASSERT(lck != NULL); in __kmpc_end_critical()
1718 __kmp_itt_critical_releasing(lck); in __kmpc_end_critical()
1722 KMP_RELEASE_TAS_LOCK(lck, global_tid); in __kmpc_end_critical()
1726 KMP_RELEASE_FUTEX_LOCK(lck, global_tid); in __kmpc_end_critical()
1730 KMP_D_LOCK_FUNC(lck, unset)((kmp_dyna_lock_t *)lck, global_tid); in __kmpc_end_critical()
1736 lck = ilk->lock; in __kmpc_end_critical()
1741 __kmp_itt_critical_releasing(lck); in __kmpc_end_critical()
1743 KMP_I_LOCK_FUNC(ilk, unset)(lck, global_tid); in __kmpc_end_critical()
1749 (sizeof(lck->tas.lk.poll) <= OMP_CRITICAL_SIZE)) { in __kmpc_end_critical()
1750 lck = (kmp_user_lock_p)crit; in __kmpc_end_critical()
1754 (sizeof(lck->futex.lk.poll) <= OMP_CRITICAL_SIZE)) { in __kmpc_end_critical()
1755 lck = (kmp_user_lock_p)crit; in __kmpc_end_critical()
1759 lck = (kmp_user_lock_p)TCR_PTR(*((kmp_user_lock_p *)crit)); in __kmpc_end_critical()
1762 KMP_ASSERT(lck != NULL); in __kmpc_end_critical()
1768 __kmp_itt_critical_releasing(lck); in __kmpc_end_critical()
1772 __kmp_release_user_lock_with_checks(lck, global_tid); in __kmpc_end_critical()
1782 ompt_mutex_critical, (ompt_wait_id_t)(uintptr_t)lck, in __kmpc_end_critical()
2538 kmp_user_lock_p lck; in __kmpc_init_lock() local
2550 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_init_lock()
2551 lck = (kmp_user_lock_p)user_lock; in __kmpc_init_lock()
2555 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_init_lock()
2556 lck = (kmp_user_lock_p)user_lock; in __kmpc_init_lock()
2560 lck = __kmp_user_lock_allocate(user_lock, gtid, 0); in __kmpc_init_lock()
2562 INIT_LOCK(lck); in __kmpc_init_lock()
2563 __kmp_set_user_lock_location(lck, loc); in __kmpc_init_lock()
2578 __kmp_itt_lock_creating(lck); in __kmpc_init_lock()
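
__kmpc_init_lock above keeps the lock in place when a test-and-set or futex lock fits in omp_lock_t (the OMP_LOCK_T_SIZE checks) and otherwise allocates an indirect user lock; the destroy/set/unset entry points below repeat the same dispatch. The user-level lock API follows the same init/set/unset/destroy lifecycle these entries implement; a minimal sketch:

    #include <omp.h>

    omp_lock_t l;
    int total = 0;

    void demo(void) {
      omp_init_lock(&l);    /* choose/allocate the lock representation */
      #pragma omp parallel for
      for (int i = 0; i < 100; i++) {
        omp_set_lock(&l);   /* acquire */
        total += i;
        omp_unset_lock(&l); /* release */
      }
      omp_destroy_lock(&l); /* destroy and, if indirect, free */
    }
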
2610 kmp_user_lock_p lck; in __kmpc_init_nest_lock() local
2622 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <= in __kmpc_init_nest_lock()
2624 lck = (kmp_user_lock_p)user_lock; in __kmpc_init_nest_lock()
2628 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <= in __kmpc_init_nest_lock()
2630 lck = (kmp_user_lock_p)user_lock; in __kmpc_init_nest_lock()
2634 lck = __kmp_user_lock_allocate(user_lock, gtid, 0); in __kmpc_init_nest_lock()
2637 INIT_NESTED_LOCK(lck); in __kmpc_init_nest_lock()
2638 __kmp_set_user_lock_location(lck, loc); in __kmpc_init_nest_lock()
2653 __kmp_itt_lock_creating(lck); in __kmpc_init_nest_lock()
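
The nestable variants add depth_locked to the size checks because the owner may re-acquire the lock. A sketch with the user-level nest-lock API, which exercises the same nesting semantics:

    #include <omp.h>

    omp_nest_lock_t nl;

    void leaf(void) {
      omp_set_nest_lock(&nl);   /* re-acquire by the owner: depth 1 -> 2 */
      /* ... */
      omp_unset_nest_lock(&nl); /* depth 2 -> 1 */
    }

    void root(void) {
      omp_init_nest_lock(&nl);
      omp_set_nest_lock(&nl);   /* depth 0 -> 1 */
      leaf();                   /* nested acquire by the same thread is legal */
      omp_unset_nest_lock(&nl); /* depth 1 -> 0: lock released */
      omp_destroy_nest_lock(&nl);
    }
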
2663 kmp_user_lock_p lck; in __kmpc_destroy_lock() local
2665 lck = ((kmp_indirect_lock_t *)KMP_LOOKUP_I_LOCK(user_lock))->lock; in __kmpc_destroy_lock()
2667 lck = (kmp_user_lock_p)user_lock; in __kmpc_destroy_lock()
2669 __kmp_itt_lock_destroyed(lck); in __kmpc_destroy_lock()
2683 kmp_user_lock_p lck; in __kmpc_destroy_lock() local
2686 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_destroy_lock()
2687 lck = (kmp_user_lock_p)user_lock; in __kmpc_destroy_lock()
2691 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_destroy_lock()
2692 lck = (kmp_user_lock_p)user_lock; in __kmpc_destroy_lock()
2696 lck = __kmp_lookup_user_lock(user_lock, "omp_destroy_lock"); in __kmpc_destroy_lock()
2711 __kmp_itt_lock_destroyed(lck); in __kmpc_destroy_lock()
2713 DESTROY_LOCK(lck); in __kmpc_destroy_lock()
2716 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_destroy_lock()
2721 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_destroy_lock()
2726 __kmp_user_lock_free(user_lock, gtid, lck); in __kmpc_destroy_lock()
2753 kmp_user_lock_p lck; in __kmpc_destroy_nest_lock() local
2756 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <= in __kmpc_destroy_nest_lock()
2758 lck = (kmp_user_lock_p)user_lock; in __kmpc_destroy_nest_lock()
2762 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <= in __kmpc_destroy_nest_lock()
2764 lck = (kmp_user_lock_p)user_lock; in __kmpc_destroy_nest_lock()
2768 lck = __kmp_lookup_user_lock(user_lock, "omp_destroy_nest_lock"); in __kmpc_destroy_nest_lock()
2783 __kmp_itt_lock_destroyed(lck); in __kmpc_destroy_nest_lock()
2786 DESTROY_NESTED_LOCK(lck); in __kmpc_destroy_nest_lock()
2789 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <= in __kmpc_destroy_nest_lock()
2795 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <= in __kmpc_destroy_nest_lock()
2801 __kmp_user_lock_free(user_lock, gtid, lck); in __kmpc_destroy_nest_lock()
2851 kmp_user_lock_p lck; in __kmpc_set_lock() local
2854 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_set_lock()
2855 lck = (kmp_user_lock_p)user_lock; in __kmpc_set_lock()
2859 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_set_lock()
2860 lck = (kmp_user_lock_p)user_lock; in __kmpc_set_lock()
2864 lck = __kmp_lookup_user_lock(user_lock, "omp_set_lock"); in __kmpc_set_lock()
2868 __kmp_itt_lock_acquiring(lck); in __kmpc_set_lock()
2878 (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_set_lock()
2882 ACQUIRE_LOCK(lck, gtid); in __kmpc_set_lock()
2885 __kmp_itt_lock_acquired(lck); in __kmpc_set_lock()
2891 ompt_mutex_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_set_lock()
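
__kmpc_set_lock and the other acquire paths above publish (ompt_wait_id_t)(uintptr_t)lck through the mutex-acquire/acquired callbacks. A minimal OMPT tool sketch that observes those wait ids, assuming the standard OpenMP 5.x tool interface from omp-tools.h (the my_/on_ names are illustrative):

    #include <omp-tools.h>
    #include <stdio.h>

    /* fires after a lock/critical/ordered acquisition completes */
    static void on_mutex_acquired(ompt_mutex_t kind, ompt_wait_id_t wait_id,
                                  const void *codeptr_ra) {
      printf("acquired mutex kind=%d wait_id=%llu\n", (int)kind,
             (unsigned long long)wait_id);
    }

    static int my_init(ompt_function_lookup_t lookup, int initial_device_num,
                       ompt_data_t *tool_data) {
      ompt_set_callback_t set_cb =
          (ompt_set_callback_t)lookup("ompt_set_callback");
      set_cb(ompt_callback_mutex_acquired, (ompt_callback_t)on_mutex_acquired);
      return 1; /* nonzero keeps the tool active */
    }

    static void my_fini(ompt_data_t *tool_data) {}

    ompt_start_tool_result_t *ompt_start_tool(unsigned int omp_version,
                                              const char *runtime_version) {
      static ompt_start_tool_result_t result = {&my_init, &my_fini, {0}};
      return &result;
    }
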
2946 kmp_user_lock_p lck; in __kmpc_set_nest_lock() local
2949 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <= in __kmpc_set_nest_lock()
2951 lck = (kmp_user_lock_p)user_lock; in __kmpc_set_nest_lock()
2955 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <= in __kmpc_set_nest_lock()
2957 lck = (kmp_user_lock_p)user_lock; in __kmpc_set_nest_lock()
2961 lck = __kmp_lookup_user_lock(user_lock, "omp_set_nest_lock"); in __kmpc_set_nest_lock()
2965 __kmp_itt_lock_acquiring(lck); in __kmpc_set_nest_lock()
2976 __ompt_get_mutex_impl_type(), (ompt_wait_id_t)(uintptr_t)lck, in __kmpc_set_nest_lock()
2982 ACQUIRE_NESTED_LOCK(lck, gtid, &acquire_status); in __kmpc_set_nest_lock()
2985 __kmp_itt_lock_acquired(lck); in __kmpc_set_nest_lock()
2994 ompt_mutex_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_set_nest_lock()
3000 ompt_scope_begin, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_set_nest_lock()
3042 kmp_user_lock_p lck; in __kmpc_unset_lock() local
3048 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_unset_lock()
3065 ompt_mutex_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_unset_lock()
3071 lck = (kmp_user_lock_p)user_lock; in __kmpc_unset_lock()
3076 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_unset_lock()
3077 lck = (kmp_user_lock_p)user_lock; in __kmpc_unset_lock()
3081 lck = __kmp_lookup_user_lock(user_lock, "omp_unset_lock"); in __kmpc_unset_lock()
3085 __kmp_itt_lock_releasing(lck); in __kmpc_unset_lock()
3088 RELEASE_LOCK(lck, gtid); in __kmpc_unset_lock()
3097 ompt_mutex_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_unset_lock()
3138 kmp_user_lock_p lck; in __kmpc_unset_nest_lock() local
3143 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <= in __kmpc_unset_nest_lock()
3175 ompt_mutex_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_unset_nest_lock()
3180 ompt_mutex_scope_end, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_unset_nest_lock()
3187 lck = (kmp_user_lock_p)user_lock; in __kmpc_unset_nest_lock()
3192 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <= in __kmpc_unset_nest_lock()
3194 lck = (kmp_user_lock_p)user_lock; in __kmpc_unset_nest_lock()
3198 lck = __kmp_lookup_user_lock(user_lock, "omp_unset_nest_lock"); in __kmpc_unset_nest_lock()
3202 __kmp_itt_lock_releasing(lck); in __kmpc_unset_nest_lock()
3206 release_status = RELEASE_NESTED_LOCK(lck, gtid); in __kmpc_unset_nest_lock()
3217 ompt_mutex_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_unset_nest_lock()
3222 ompt_mutex_scope_end, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_unset_nest_lock()
3284 kmp_user_lock_p lck; in __kmpc_test_lock() local
3288 (sizeof(lck->tas.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_test_lock()
3289 lck = (kmp_user_lock_p)user_lock; in __kmpc_test_lock()
3293 (sizeof(lck->futex.lk.poll) <= OMP_LOCK_T_SIZE)) { in __kmpc_test_lock()
3294 lck = (kmp_user_lock_p)user_lock; in __kmpc_test_lock()
3298 lck = __kmp_lookup_user_lock(user_lock, "omp_test_lock"); in __kmpc_test_lock()
3302 __kmp_itt_lock_acquiring(lck); in __kmpc_test_lock()
3312 (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_test_lock()
3316 rc = TEST_LOCK(lck, gtid); in __kmpc_test_lock()
3319 __kmp_itt_lock_acquired(lck); in __kmpc_test_lock()
3321 __kmp_itt_lock_cancelled(lck); in __kmpc_test_lock()
3327 ompt_mutex_test_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_test_lock()
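
TEST_LOCK above returns whether the non-blocking acquire succeeded, and the ITT/OMPT references report acquired versus cancelled accordingly. The user-level counterpart:

    #include <omp.h>

    omp_lock_t work_lock;

    int try_do_work(void) {
      if (omp_test_lock(&work_lock)) { /* non-blocking acquire */
        /* ... protected work ... */
        omp_unset_lock(&work_lock);
        return 1;
      }
      return 0; /* lock was busy; caller can do something else */
    }
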
3387 kmp_user_lock_p lck; in __kmpc_test_nest_lock() local
3391 (sizeof(lck->tas.lk.poll) + sizeof(lck->tas.lk.depth_locked) <= in __kmpc_test_nest_lock()
3393 lck = (kmp_user_lock_p)user_lock; in __kmpc_test_nest_lock()
3397 (sizeof(lck->futex.lk.poll) + sizeof(lck->futex.lk.depth_locked) <= in __kmpc_test_nest_lock()
3399 lck = (kmp_user_lock_p)user_lock; in __kmpc_test_nest_lock()
3403 lck = __kmp_lookup_user_lock(user_lock, "omp_test_nest_lock"); in __kmpc_test_nest_lock()
3407 __kmp_itt_lock_acquiring(lck); in __kmpc_test_nest_lock()
3419 __ompt_get_mutex_impl_type(), (ompt_wait_id_t)(uintptr_t)lck, in __kmpc_test_nest_lock()
3424 rc = TEST_NESTED_LOCK(lck, gtid); in __kmpc_test_nest_lock()
3427 __kmp_itt_lock_acquired(lck); in __kmpc_test_nest_lock()
3429 __kmp_itt_lock_cancelled(lck); in __kmpc_test_nest_lock()
3438 ompt_mutex_test_nest_lock, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_test_nest_lock()
3444 ompt_mutex_scope_begin, (ompt_wait_id_t)(uintptr_t)lck, codeptr); in __kmpc_test_nest_lock()
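
The nestable test returns the new nesting count (zero on failure), which is why the OMPT references distinguish a first acquisition (mutex_acquired) from a re-acquisition (the nest_lock scope_begin callback). User-level sketch:

    #include <omp.h>

    omp_nest_lock_t nlock;

    void try_nested(void) {
      int depth = omp_test_nest_lock(&nlock); /* 0 on failure, new depth on success */
      if (depth > 0) {
        /* ... */
        omp_unset_nest_lock(&nlock);
      }
    }
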
3481 kmp_user_lock_p lck; in __kmp_enter_critical_section_reduce_block() local
3500 lck = (kmp_user_lock_p)lk; in __kmp_enter_critical_section_reduce_block()
3501 KMP_DEBUG_ASSERT(lck != NULL); in __kmp_enter_critical_section_reduce_block()
3503 __kmp_push_sync(global_tid, ct_critical, loc, lck, __kmp_user_lock_seq); in __kmp_enter_critical_section_reduce_block()
3508 lck = ilk->lock; in __kmp_enter_critical_section_reduce_block()
3509 KMP_DEBUG_ASSERT(lck != NULL); in __kmp_enter_critical_section_reduce_block()
3511 __kmp_push_sync(global_tid, ct_critical, loc, lck, __kmp_user_lock_seq); in __kmp_enter_critical_section_reduce_block()
3513 KMP_I_LOCK_FUNC(ilk, set)(lck, global_tid); in __kmp_enter_critical_section_reduce_block()
3522 lck = (kmp_user_lock_p)crit; in __kmp_enter_critical_section_reduce_block()
3524 lck = __kmp_get_critical_section_ptr(crit, loc, global_tid); in __kmp_enter_critical_section_reduce_block()
3526 KMP_DEBUG_ASSERT(lck != NULL); in __kmp_enter_critical_section_reduce_block()
3529 __kmp_push_sync(global_tid, ct_critical, loc, lck); in __kmp_enter_critical_section_reduce_block()
3531 __kmp_acquire_user_lock_with_checks(lck, global_tid); in __kmp_enter_critical_section_reduce_block()
3541 kmp_user_lock_p lck; in __kmp_end_critical_section_reduce_block() local
3546 lck = (kmp_user_lock_p)crit; in __kmp_end_critical_section_reduce_block()
3549 KMP_D_LOCK_FUNC(lck, unset)((kmp_dyna_lock_t *)lck, global_tid); in __kmp_end_critical_section_reduce_block()
3564 lck = *((kmp_user_lock_p *)crit); in __kmp_end_critical_section_reduce_block()
3565 KMP_ASSERT(lck != NULL); in __kmp_end_critical_section_reduce_block()
3567 lck = (kmp_user_lock_p)crit; in __kmp_end_critical_section_reduce_block()
3573 __kmp_release_user_lock_with_checks(lck, global_tid); in __kmp_end_critical_section_reduce_block()
3633 kmp_critical_name *lck) { in __kmpc_reduce_nowait() argument
3684 loc, global_tid, num_vars, reduce_size, reduce_data, reduce_func, lck); in __kmpc_reduce_nowait()
3692 __kmp_enter_critical_section_reduce_block(loc, global_tid, lck); in __kmpc_reduce_nowait()
3791 kmp_critical_name *lck) { in __kmpc_end_reduce_nowait() argument
3804 __kmp_end_critical_section_reduce_block(loc, global_tid, lck); in __kmpc_end_reduce_nowait()
3862 kmp_critical_name *lck) { in __kmpc_reduce() argument
3897 loc, global_tid, num_vars, reduce_size, reduce_data, reduce_func, lck); in __kmpc_reduce()
3905 __kmp_enter_critical_section_reduce_block(loc, global_tid, lck); in __kmpc_reduce()
3982 kmp_critical_name *lck) { in __kmpc_end_reduce() argument
4002 __kmp_end_critical_section_reduce_block(loc, global_tid, lck); in __kmpc_end_reduce()
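
__kmpc_reduce_nowait, __kmpc_reduce and their _end_ counterparts receive the kmp_critical_name *lck that backs the critical-section reduction method; when that method is selected, the combine is bracketed by __kmp_enter_critical_section_reduce_block / __kmp_end_critical_section_reduce_block from the references above. A reduction that exercises these entry points, with the lowering described as the typical Clang shape rather than a guaranteed one:

    #include <stdio.h>

    int main(void) {
      int sum = 0;
      /* typically lowered to __kmpc_reduce_nowait (or __kmpc_reduce for the
         blocking form) with reduce_data, a generated reduce_func that merges
         the threads' private copies, and the kmp_critical_name lock used when
         the critical-section method is chosen */
      #pragma omp parallel for reduction(+ : sum)
      for (int i = 0; i < 1000; i++)
        sum += i;
      printf("%d\n", sum);
      return 0;
    }
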