References to kmp_info_t in /freebsd/contrib/llvm-project/openmp/runtime/src/:
ompt-specific.h
     30  void __ompt_lw_taskteam_init(ompt_lw_taskteam_t *lwt, kmp_info_t *thr, int gtid,
     33  void __ompt_lw_taskteam_link(ompt_lw_taskteam_t *lwt, kmp_info_t *thr,
     36  void __ompt_lw_taskteam_unlink(kmp_info_t *thr);
     62  ompt_sync_region_t __ompt_get_barrier_kind(enum barrier_type, kmp_info_t *);
     80  kmp_info_t *thr = __kmp_threads[gtid];    [in __ompt_load_return_address()]
    115  inline kmp_info_t *ompt_get_thread_gtid(int gtid) {    [in ompt_get_thread_gtid()]
    119  inline kmp_info_t *ompt_get_thread() {    [in ompt_get_thread()]
    124  inline void ompt_set_thread_state(kmp_info_t *thread, ompt_state_t state) {    [in ompt_set_thread_state()]
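The ompt-specific.h hits above outline small accessor helpers that map a global thread id (gtid) to its kmp_info_t and update the OMPT state a tool observes. Below is a minimal, self-contained sketch of that accessor pattern; the demo_* names and the state enum are invented stand-ins for this illustration, not runtime identifiers.

    /* Simplified model (not the actual runtime code) of the gtid -> descriptor
     * lookup and state-update helpers suggested by the ompt-specific.h hits. */
    #include <stddef.h>

    typedef enum { demo_state_idle, demo_state_work_parallel } demo_ompt_state_t;

    typedef struct {
      int gtid;                /* global thread id */
      demo_ompt_state_t state; /* state a tool would query */
    } demo_thread_info_t;

    #define DEMO_MAX_THREADS 64
    static demo_thread_info_t *demo_threads[DEMO_MAX_THREADS]; /* stands in for __kmp_threads */

    /* Map a gtid to its descriptor; NULL for an unknown/uninitialized id. */
    static inline demo_thread_info_t *demo_get_thread_gtid(int gtid) {
      return (gtid >= 0 && gtid < DEMO_MAX_THREADS) ? demo_threads[gtid] : NULL;
    }

    /* Record the state observed through the OMPT state query. */
    static inline void demo_set_thread_state(demo_thread_info_t *thread,
                                             demo_ompt_state_t state) {
      if (thread)
        thread->state = state;
    }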
ompt-specific.cpp
     52  kmp_info_t *thr = ompt_get_thread();    [in __ompt_get_teaminfo()]
    105  kmp_info_t *thr = ompt_get_thread();    [in __ompt_get_task_info_object()]
    145  kmp_info_t *thr = ompt_get_thread();    [in __ompt_get_scheduling_taskinfo()]
    203  kmp_info_t *thread = ompt_get_thread();    [in __ompt_get_thread_data_internal()]
    216  kmp_info_t *ti = ompt_get_thread();    [in __ompt_thread_assign_wait_id()]
    223  kmp_info_t *ti = ompt_get_thread();    [in __ompt_get_state_internal()]
    260  void __ompt_lw_taskteam_init(ompt_lw_taskteam_t *lwt, kmp_info_t *thr, int gtid,    [in __ompt_lw_taskteam_init()]
    274  void __ompt_lw_taskteam_link(ompt_lw_taskteam_t *lwt, kmp_info_t *thr,    [in __ompt_lw_taskteam_link()]
    318  void __ompt_lw_taskteam_unlink(kmp_info_t *thr) {    [in __ompt_lw_taskteam_unlink()]
    348  kmp_info_t *thr = ompt_get_thread();    [in __ompt_get_task_data()]
    [all …]
kmp_wait_release.h
    107  kmp_info_t *waiting_threads[1]; /**< Threads sleeping on this thread. */
    123  kmp_info_t *get_waiter(kmp_uint32 i) {
    131  void set_waiter(kmp_info_t *thr) {
    149  kmp_flag_native(volatile PtrType *p, kmp_info_t *thr)
    242  kmp_flag_atomic(std::atomic<PtrType> *p, kmp_info_t *thr)
    322  static void __ompt_implicit_task_end(kmp_info_t *this_thr,
    368  __kmp_wait_template(kmp_info_t *this_thr,
    715  kmp_info_t *th = __kmp_threads[th_gtid];
    811  kmp_info_t *waiter = flag->get_waiter(i);
    830  kmp_flag_32(std::atomic<kmp_uint32> *p, kmp_info_t *thr)
    [all …]
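The kmp_wait_release.h hits show that a wait/release flag pairs the word being waited on with a record of the thread(s) sleeping on it (waiting_threads[], get_waiter/set_waiter), so the releasing side knows whom to wake. The following is a rough sketch of that shape under the single-waiter-slot assumption visible in the declaration above; all demo_* names are invented for the sketch.

    /* Illustrative only: a flag object carrying the sync word plus one waiter slot. */
    #include <stdint.h>
    #include <stddef.h>

    typedef struct demo_thread_info demo_thread_info_t;

    typedef struct {
      volatile uint64_t *loc;                 /* the flag word threads spin/sleep on */
      demo_thread_info_t *waiting_threads[1]; /* thread(s) sleeping on this flag */
      uint32_t num_waiting;
    } demo_flag64_t;

    /* Sleeping side: register the waiter so a later release can target its wakeup. */
    static inline void demo_flag_set_waiter(demo_flag64_t *f, demo_thread_info_t *thr) {
      f->waiting_threads[0] = thr;
      f->num_waiting = 1;
    }

    /* Releasing side: fetch the i-th recorded waiter (only slot 0 in this sketch). */
    static inline demo_thread_info_t *demo_flag_get_waiter(demo_flag64_t *f, uint32_t i) {
      return (i < f->num_waiting) ? f->waiting_threads[i] : NULL;
    }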
kmp_alloc.cpp
     46  static void bpool(kmp_info_t *th, void *buffer, bufsize len);
     47  static void *bget(kmp_info_t *th, bufsize size);
     48  static void *bgetz(kmp_info_t *th, bufsize size);
     49  static void *bgetr(kmp_info_t *th, void *buffer, bufsize newsize);
     50  static void brel(kmp_info_t *th, void *buf);
     51  static void bectl(kmp_info_t *th, bget_compact_t compact,
    138  kmp_info_t *bthr; /* The thread which owns the buffer pool */
    227  static void set_thr_data(kmp_info_t *th) {    [in set_thr_data()]
    253  static thr_data_t *get_thr_data(kmp_info_t *th) {    [in get_thr_data()]
    264  static void __kmp_bget_dequeue(kmp_info_t *th) {    [in __kmp_bget_dequeue()]
    [all …]
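The kmp_alloc.cpp hits show that every bget-style entry point (bpool/bget/bgetz/bgetr/brel/bectl) takes the owning kmp_info_t as its first argument, tying each pool operation to a specific thread. A simplified sketch of that calling convention follows; the demo_* types are stand-ins, and malloc/free replace the real pool management that would operate on the buffer handed to bpool().

    /* Sketch of a thread-scoped allocator interface, not the runtime's allocator. */
    #include <stdlib.h>

    typedef struct {
      void *pool_base;  /* memory handed to this thread's pool */
      long  pool_len;
    } demo_thr_data_t;

    typedef struct {
      demo_thr_data_t bget_data;  /* per-thread allocator bookkeeping */
    } demo_thread_info_t;

    /* Seed the calling thread's pool with a caller-provided buffer. */
    static void demo_bpool(demo_thread_info_t *th, void *buffer, long len) {
      th->bget_data.pool_base = buffer;
      th->bget_data.pool_len = len;
    }

    /* Allocate on behalf of thread th (malloc stands in for the pool logic). */
    static void *demo_bget(demo_thread_info_t *th, long size) {
      (void)th;  /* the real allocator would consult th's pool here */
      return malloc((size_t)size);
    }

    /* Release a buffer; the real code may queue cross-thread frees back to th. */
    static void demo_brel(demo_thread_info_t *th, void *buf) {
      (void)th;
      free(buf);
    }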
kmp.h
   3089  } kmp_info_t;    [typedef]
   3181  KMP_ALIGN_CACHE kmp_info_t **t_threads;
   3274  kmp_info_t *r_uber_thread;
   3298  kmp_info_t **threads;
   3590  extern kmp_info_t **__kmp_threads; /* Descriptors for the threads */
   3595  extern volatile kmp_info_t *__kmp_thread_pool;
   3596  extern kmp_info_t *__kmp_thread_pool_insert_pt;
   3638  static inline int __kmp_gtid_from_thread(const kmp_info_t *thr) {    [in __kmp_gtid_from_thread()]
   3643  static inline kmp_info_t *__kmp_thread_from_gtid(int gtid) {    [in __kmp_thread_from_gtid()]
   3676  extern kmp_info_t __kmp_monitor;
   [all …]
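The kmp.h hits document the central registry: a global __kmp_threads array of descriptors indexed by gtid, with inline helpers converting between a descriptor and its gtid. Here is a self-contained sketch of that two-way mapping; the demo_* names and the flat gtid field are invented simplifications of the real nested descriptor fields.

    /* Sketch of the gtid <-> descriptor mapping over a global thread table. */
    #include <assert.h>

    typedef struct {
      int gtid;  /* this thread's index in the global table */
    } demo_thread_info_t;

    #define DEMO_MAX_THREADS 256
    static demo_thread_info_t *demo_threads[DEMO_MAX_THREADS]; /* stands in for __kmp_threads */

    /* Descriptor -> gtid: read the id stored in the descriptor itself. */
    static inline int demo_gtid_from_thread(const demo_thread_info_t *thr) {
      assert(thr != NULL);
      return thr->gtid;
    }

    /* gtid -> descriptor: index the global table. */
    static inline demo_thread_info_t *demo_thread_from_gtid(int gtid) {
      assert(gtid >= 0 && gtid < DEMO_MAX_THREADS);
      return demo_threads[gtid];
    }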
kmp_runtime.cpp
     79  kmp_info_t __kmp_monitor;
     86  static void __kmp_initialize_info(kmp_info_t *, kmp_team_t *, int tid,
    109  static void __kmp_reap_thread(kmp_info_t *thread, int is_root);
    110  kmp_info_t *__kmp_thread_pool_insert_pt = NULL;
    116  static kmp_nested_nthreads_t *__kmp_override_nested_nth(kmp_info_t *thr,    [in __kmp_override_nested_nth()]
    136  kmp_info_t **other_threads;    [in __kmp_get_global_thread_id()]
    183  kmp_info_t *thr = (kmp_info_t *)TCR_SYNC_PTR(other_threads[i]);    [in __kmp_get_global_thread_id()]
    298  void __kmp_check_stack_overlap(kmp_info_t *th) {    [in __kmp_check_stack_overlap()]
    336  kmp_info_t *f_th = (kmp_info_t *)TCR_SYNC_PTR(__kmp_threads[f]);    [in __kmp_check_stack_overlap()]
    501  static void __kmp_print_thread_storage_map(kmp_info_t *thr, int gtid) {    [in __kmp_print_thread_storage_map()]
    [all …]
kmp_taskdeps.h
     21  static inline void __kmp_node_deref(kmp_info_t *thread, kmp_depnode_t *node) {    [in __kmp_node_deref()]
     40  static inline void __kmp_depnode_list_free(kmp_info_t *thread,    [in __kmp_depnode_list_free()]
     56  static inline void __kmp_dephash_free_entries(kmp_info_t *thread,    [in __kmp_dephash_free_entries()]
     83  static inline void __kmp_dephash_free(kmp_info_t *thread, kmp_dephash_t *h) {    [in __kmp_dephash_free()]
    112  kmp_info_t *thread = __kmp_threads[gtid];    [in __kmp_release_deps()]
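The kmp_taskdeps.h hits suggest dependence nodes are reference counted and released through the passed-in thread's allocator once the last reference drops, which is why every free/deref helper takes a kmp_info_t. A hedged sketch of that deref pattern follows; the demo_* names are invented and C11 atomics stand in for the runtime's atomic macros.

    /* Illustrative only: reference-counted node freed via a per-thread free routine. */
    #include <stdatomic.h>
    #include <stdlib.h>

    typedef struct { int unused; } demo_thread_info_t;

    typedef struct demo_depnode {
      atomic_int nrefs;  /* references held by successor lists, hash entries, ... */
    } demo_depnode_t;

    /* Stand-in for a thread-scoped free (the real code can use a fast per-thread pool). */
    static void demo_thread_free(demo_thread_info_t *thread, void *p) {
      (void)thread;
      free(p);
    }

    /* Drop one reference; free through the calling thread's allocator at zero. */
    static void demo_node_deref(demo_thread_info_t *thread, demo_depnode_t *node) {
      if (!node)
        return;
      if (atomic_fetch_sub(&node->nrefs, 1) == 1)
        demo_thread_free(thread, node);
    }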
z_Windows_NT_util.cpp
    203  kmp_info_t *th, C *flag) {    [in __kmp_win32_cond_wait()]
    317  void __kmp_suspend_initialize_thread(kmp_info_t *th) {    [in __kmp_suspend_initialize_thread()]
    337  void __kmp_suspend_uninitialize_thread(kmp_info_t *th) {    [in __kmp_suspend_uninitialize_thread()]
    347  int __kmp_try_suspend_mx(kmp_info_t *th) {    [in __kmp_try_suspend_mx()]
    351  void __kmp_lock_suspend_mx(kmp_info_t *th) {    [in __kmp_lock_suspend_mx()]
    355  void __kmp_unlock_suspend_mx(kmp_info_t *th) {    [in __kmp_unlock_suspend_mx()]
    363  kmp_info_t *th = __kmp_threads[th_gtid];    [in __kmp_suspend_template()]
    491  kmp_info_t *th = __kmp_threads[target_gtid];    [in __kmp_resume_template()]
    926  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_terminate_thread()]
    994  kmp_info_t *this_th    [in __kmp_launch_worker()]
    [all …]
kmp_barrier.cpp
    217  kmp_info_t **other_threads = team->t.t_threads;    [in __kmp_dist_barrier_wakeup()]
    227  enum barrier_type bt, kmp_info_t *this_thr, int gtid, int tid,    [in __kmp_dist_barrier_gather()]
    232  kmp_info_t **other_threads;    [in __kmp_dist_barrier_gather()]
    376  enum barrier_type bt, kmp_info_t *this_thr, int gtid, int tid,    [in __kmp_dist_barrier_release()]
    557  enum barrier_type bt, kmp_info_t *this_thr, int gtid, int tid,    [in __kmp_linear_barrier_gather_template()]
    562  kmp_info_t **other_threads = team->t.t_threads;    [in __kmp_linear_barrier_gather_template()]
    659  enum barrier_type bt, kmp_info_t *this_thr, int gtid, int tid,    [in __kmp_linear_barrier_release_template()]
    668  kmp_info_t **other_threads;    [in __kmp_linear_barrier_release_template()]
    766  enum barrier_type bt, kmp_info_t *this_thr, int gtid, int tid,    [in __kmp_linear_barrier_gather()]
    773  enum barrier_type bt, kmp_info_t *this_thr, int gtid, int tid,    [in __kmp_linear_barrier_gather_cancellable()]
    [all …]
kmp_tasking.cpp
     34  kmp_info_t *this_thr);
     35  static void __kmp_alloc_task_deque(kmp_info_t *thread,
     37  static int __kmp_realloc_task_threads_data(kmp_info_t *thread,
    158  static void __kmp_push_task_stack(kmp_int32 gtid, kmp_info_t *thread,    [in __kmp_push_task_stack()]
    217  static void __kmp_pop_task_stack(kmp_int32 gtid, kmp_info_t *thread,    [in __kmp_pop_task_stack()]
    312  static void __kmp_realloc_task_deque(kmp_info_t *thread,
    402  static kmp_int32 __kmp_push_priority_task(kmp_int32 gtid, kmp_info_t *thread,
    478  kmp_info_t *thread = __kmp_threads[gtid];
    612  void __kmp_pop_current_task_from_thread(kmp_info_t *this_thr) {
    634  void __kmp_push_current_task_to_thread(kmp_info_t *this_thr, kmp_team_t *team,
    [all …]
kmp_ftn_entry.h
    116  kmp_info_t *thread;    [in FTN_SET_BLOCKTIME()]
    367  kmp_info_t *thread;    [in KMP_EXPAND_NAME()]
    392  kmp_info_t *this_thr = __kmp_threads[__kmp_entry_gtid()];    [in FTN_CONTROL_TOOL()]
    452  kmp_info_t *th;    [in ConvertedString()]
    645  kmp_info_t *thread = __kmp_threads[gtid];    [in KMP_EXPAND_NAME()]
    659  kmp_info_t *thread;    [in KMP_EXPAND_NAME()]
    676  kmp_info_t *thread;    [in KMP_EXPAND_NAME()]
    687  kmp_info_t *thread;    [in KMP_EXPAND_NAME()]
    700  kmp_info_t *thread;
    710  kmp_info_t *t    [in KMP_EXPAND_NAME()]
    [all …]
kmp_cancel.cpp
     30  kmp_info_t *this_thr = __kmp_threads[gtid];    [in __kmpc_cancel()]
    137  kmp_info_t *this_thr = __kmp_threads[gtid];    [in __kmpc_cancellationpoint()]
    245  kmp_info_t *this_thr = __kmp_threads[gtid];    [in __kmpc_cancel_barrier()]
    311  kmp_info_t *this_thr = __kmp_entry_thread();    [in __kmp_get_cancellation_status()]
z_Linux_util.cpp
    442  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_terminate_thread()]
    462  static kmp_int32 __kmp_set_stack_info(int gtid, kmp_info_t *th) {    [in __kmp_set_stack_info()]
    537  gtid = ((kmp_info_t *)thr)->th.th_info.ds.ds_gtid;    [in __kmp_launch_worker()]
    544  __kmp_stats_thread_ptr = ((kmp_info_t *)thr)->th.th_stats;    [in __kmp_launch_worker()]
    589  __kmp_set_stack_info(gtid, (kmp_info_t *)thr);    [in __kmp_launch_worker()]
    591  __kmp_check_stack_overlap((kmp_info_t *)thr);    [in __kmp_launch_worker()]
    593  exit_val = __kmp_launch_thread((kmp_info_t *)thr);    [in __kmp_launch_worker()]
    630  __kmp_set_stack_info(((kmp_info_t *)thr)->th.th_info.ds.ds_gtid,    [in __kmp_launch_monitor()]
    631  (kmp_info_t *)thr);    [in __kmp_launch_monitor()]
    633  __kmp_check_stack_overlap((kmp_info_t *)thr);    [in __kmp_launch_monitor()]
    [all …]
kmp_taskdeps.cpp
     69  static kmp_dephash_t *__kmp_dephash_extend(kmp_info_t *thread,    [in __kmp_dephash_extend()]
    125  static kmp_dephash_t *__kmp_dephash_create(kmp_info_t *thread,    [in __kmp_dephash_create()]
    157  static kmp_dephash_entry *__kmp_dephash_find(kmp_info_t *thread,    [in __kmp_dephash_find()]
    199  static kmp_depnode_list_t *__kmp_add_node(kmp_info_t *thread,    [in __kmp_add_node()]
    298  __kmp_depnode_link_successor(kmp_int32 gtid, kmp_info_t *thread,    [in __kmp_depnode_link_successor()]
    341  kmp_info_t *thread,    [in __kmp_depnode_link_successor()]
    399  kmp_info_t *thread = __kmp_threads[gtid];    [in __kmp_process_dep_all()]
    459  kmp_info_t *thread = __kmp_threads[gtid];    [in __kmp_process_deps()]
    687  kmp_info_t *thread = __kmp_threads[gtid];    [in __kmpc_omp_task_with_deps()]
    914  kmp_info_t *thread = __kmp_threads[gtid];    [in __kmpc_omp_taskwait_deps_51()]
kmp_csupport.cpp
    335  kmp_info_t *master_th = __kmp_threads[gtid];    [in __kmpc_fork_call()]
    460  kmp_info_t *thread = __kmp_threads[global_tid];    [in __kmpc_set_thread_limit()]
    505  kmp_info_t *this_thr = __kmp_threads[gtid];    [in __kmpc_fork_teams()]
    623  kmp_info_t *this_thr;    [in __kmpc_end_serialized_parallel()]
    881  kmp_info_t *this_thr = __kmp_threads[global_tid];    [in __kmpc_master()]
    925  kmp_info_t *this_thr = __kmp_threads[global_tid];    [in __kmpc_end_master()]
    971  kmp_info_t *this_thr = __kmp_threads[global_tid];    [in __kmpc_masked()]
   1012  kmp_info_t *this_thr = __kmp_threads[global_tid];    [in __kmpc_end_masked()]
   1037  kmp_info_t *th;    [in __kmpc_ordered()]
   1110  kmp_info_t *th;    [in __kmpc_end_ordered()]
    [all …]
ompd-specific.h
     61  OMPD_ACCESS(kmp_info_t, th) \
    126  OMPD_SIZEOF(kmp_info_t) \
kmp_itt.inl
     70  LINKAGE kmp_itthash_entry *__kmp_itthash_find(kmp_info_t *thread,
    133  kmp_info_t *th = __kmp_thread_from_gtid(gtid);
    192  kmp_info_t *th = __kmp_thread_from_gtid(gtid);
    374  kmp_info_t *th = __kmp_thread_from_gtid(gtid);
    424  kmp_info_t *thr = __kmp_thread_from_gtid(gtid);
    569  kmp_info_t *thread = __kmp_thread_from_gtid(gtid);
    581  kmp_info_t *thread = __kmp_thread_from_gtid(gtid);
    803  kmp_info_t *thr = __kmp_thread_from_gtid((gtid));
    847  kmp_info_t *thr = __kmp_thread_from_gtid(gtid);
    861  kmp_info_t *t
    [all …]
kmp_wait_release.cpp
     15  void __kmp_wait_64(kmp_info_t *this_thr, kmp_flag_64<> *flag,    [in __kmp_wait_64()]
kmp_gsupport.cpp
    225  kmp_info_t *this_thr = __kmp_threads[gtid];    [in KMP_EXPAND_NAME()]
    384  kmp_info_t *thr;    [in __kmp_GOMP_microtask_wrapper()]
    431  kmp_info_t *thr;    [in __kmp_GOMP_parallel_microtask_wrapper()]
    465  kmp_info_t *thr = __kmp_threads[gtid];    [in __kmp_GOMP_fork_call()]
    539  kmp_info_t *thr;    [in KMP_EXPAND_NAME()]
   1297  kmp_info_t *thread;
   1528  kmp_info_t *thr;    [in KMP_EXPAND_NAME()]
   1887  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_GOMP_doacross_post()]
   1910  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_GOMP_doacross_wait()]
   2031  kmp_info_t *thread = __kmp_threads[gtid];    [in KMP_EXPAND_NAME()]
    [all …]
kmp_global.cpp
    450  kmp_info_t **__kmp_threads = NULL;
    458  volatile kmp_info_t *__kmp_thread_pool = NULL;
ompt-general.cpp
    493  kmp_info_t *root_thread = ompt_get_thread();
    743  kmp_info_t *thread;
    764  kmp_info_t *thread;
kmp_dispatch.cpp
     43  kmp_info_t *th;    [in __kmp_dispatch_deo_error()]
     61  kmp_info_t *th;    [in __kmp_dispatch_dxo_error()]
    113  kmp_info_t *th = __kmp_threads[__kmp_gtid_from_tid((int)id, team)];    [in __kmp_initialize_self_buffer()]
    198  kmp_info_t *th;    [in __kmp_dispatch_init_algorithm()]
    968  kmp_info_t *th;    [in __kmp_dispatch_init()]
   1184  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_dispatch_finish()]
   1248  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_dispatch_finish_chunk()]
   1337  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_dispatch_next_algorithm()]
   2200  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_dispatch_next()]
   2470  kmp_info_t *th;    [in __kmpc_sections_init()]
    [all …]
kmp_dispatch.h
    331  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_dispatch_deo()]
    407  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_dispatch_dxo()]
kmp_sched.cpp
    101  kmp_info_t *th = __kmp_threads[gtid];    [in __kmp_for_static_init()]
    501  kmp_info_t *th;    [in __kmp_dist_for_static_init()]
    771  kmp_info_t *th;    [in __kmp_team_static_init()]
kmp_dispatch_hier.h
    482  kmp_info_t *th = __kmp_threads[gtid];    [in next_recurse()]
    716  kmp_info_t *th = __kmp_threads[gtid];    [in next()]
    928  kmp_info_t *th;    [in __kmp_dispatch_init_hierarchy()]