Occurrences of this_thr in /external/llvm-project/openmp/runtime/src/:
kmp_barrier.cpp
    47  enum barrier_type bt, kmp_info_t *this_thr, int gtid, int tid,   in __kmp_linear_barrier_gather_template() (argument)
    50  kmp_team_t *team = this_thr->th.th_team;   in __kmp_linear_barrier_gather_template()
    51  kmp_bstate_t *thr_bar = &this_thr->th.th_bar[bt].bb;   in __kmp_linear_barrier_gather_template()
    58  KMP_DEBUG_ASSERT(this_thr == other_threads[this_thr->th.th_info.ds.ds_tid]);   in __kmp_linear_barrier_gather_template()
    63  this_thr->th.th_bar_arrive_time = this_thr->th.th_bar_min_time =   in __kmp_linear_barrier_gather_template()
    80  ANNOTATE_BARRIER_BEGIN(this_thr);   in __kmp_linear_barrier_gather_template()
    85  int nproc = this_thr->th.th_team_nproc;   in __kmp_linear_barrier_gather_template()
   107  if (flag.wait(this_thr, FALSE USE_ITT_BUILD_ARG(itt_sync_obj)))   in __kmp_linear_barrier_gather_template()
   112  flag.wait(this_thr, FALSE USE_ITT_BUILD_ARG(itt_sync_obj));   in __kmp_linear_barrier_gather_template()
   119  this_thr->th.th_bar_min_time = KMP_MIN(   in __kmp_linear_barrier_gather_template()
   [all …]
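These hits are the linear barrier's gather phase: each thread's kmp_info_t carries its per-barrier state (th_bar), workers signal arrival through a flag, and the primary thread waits on every worker via flag.wait(). A self-contained model of that gather step, under the simplifying assumption of one plain atomic arrival flag per worker (worker_state and linear_gather are illustrative names, not runtime identifiers):

// Simplified model of a linear "gather": workers publish an arrival value,
// thread 0 spins until every worker has published it. This stands in for the
// arrived flags and flag.wait() calls listed above; it is not the kmp code.
#include <atomic>
#include <thread>
#include <vector>

struct worker_state {
  std::atomic<unsigned> arrived{0}; // illustrative stand-in for the arrived flag
};

void linear_gather(std::vector<worker_state> &bar, int tid, unsigned epoch) {
  if (tid != 0) {
    bar[tid].arrived.store(epoch, std::memory_order_release); // worker checks in
  } else {
    for (int i = 1; i < (int)bar.size(); ++i) // primary waits for each worker in turn
      while (bar[i].arrived.load(std::memory_order_acquire) != epoch)
        std::this_thread::yield();
  }
}

int main() {
  const int nproc = 4;
  std::vector<worker_state> bar(nproc);
  std::vector<std::thread> pool;
  for (int tid = 0; tid < nproc; ++tid)
    pool.emplace_back([&bar, tid] { linear_gather(bar, tid, 1u); });
  for (auto &t : pool)
    t.join();
}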
kmp_wait_release.h
   130  static void __ompt_implicit_task_end(kmp_info_t *this_thr,   in __ompt_implicit_task_end() (argument)
   133  int ds_tid = this_thr->th.th_info.ds.ds_tid;   in __ompt_implicit_task_end()
   135  this_thr->th.ompt_thread_info.state = ompt_state_overhead;   in __ompt_implicit_task_end()
   151  int flags = this_thr->th.ompt_thread_info.parallel_flags;   in __ompt_implicit_task_end()
   158  this_thr->th.ompt_thread_info.state = ompt_state_idle;   in __ompt_implicit_task_end()
   160  this_thr->th.ompt_thread_info.state = ompt_state_overhead;   in __ompt_implicit_task_end()
   174  __kmp_wait_template(kmp_info_t *this_thr,   in __kmp_wait_template() (argument)
   195  th_gtid = this_thr->th.th_info.ds.ds_gtid;   in __kmp_wait_template()
   197  kmp_team_t *team = this_thr->th.th_team;   in __kmp_wait_template()
   203  KMP_ATOMIC_ST_REL(&this_thr->th.th_blocking, true);   in __kmp_wait_template()
   [all …]
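__kmp_wait_template is the loop a thread parks in while waiting for a flag to be released; the OMPT hits above are state bookkeeping done around that wait. A minimal sketch of the flag wait/release idea using only std::atomic (flag64 and its members are illustrative; the real template additionally handles blocktime, task execution while waiting, and sleeping):

#include <atomic>
#include <cstdint>
#include <thread>

// Toy 64-bit flag: a waiter spins until the releaser stores a value >= "go".
struct flag64 {
  std::atomic<std::uint64_t> value{0};
  std::uint64_t go = 1;

  bool done_check() const { return value.load(std::memory_order_acquire) >= go; }
  void release() { value.store(go, std::memory_order_release); }
  void wait() {
    while (!done_check())
      std::this_thread::yield(); // the runtime mixes spinning, yielding and sleeping
  }
};

int main() {
  flag64 f;
  std::thread waiter([&] { f.wait(); });
  f.release();
  waiter.join();
}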
kmp_cancel.cpp
    30  kmp_info_t *this_thr = __kmp_threads[gtid];   in __kmpc_cancel() (local)
    49  kmp_team_t *this_team = this_thr->th.th_team;   in __kmpc_cancel()
    84  task = this_thr->th.th_current_task;   in __kmpc_cancel()
   137  kmp_info_t *this_thr = __kmp_threads[gtid];   in __kmpc_cancellationpoint() (local)
   157  kmp_team_t *this_team = this_thr->th.th_team;   in __kmpc_cancellationpoint()
   197  task = this_thr->th.th_current_task;   in __kmpc_cancellationpoint()
   245  kmp_info_t *this_thr = __kmp_threads[gtid];   in __kmpc_cancel_barrier() (local)
   246  kmp_team_t *this_team = this_thr->th.th_team;   in __kmpc_cancel_barrier()
   311  kmp_info_t *this_thr = __kmp_entry_thread();   in __kmp_get_cancellation_status() (local)
   317  kmp_team_t *this_team = this_thr->th.th_team;   in __kmp_get_cancellation_status()
   [all …]
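These are the cancellation entry points; each one looks up the calling thread via __kmp_threads[gtid] and then consults its team and current task. At the user level the same machinery is reached through the cancel pragmas, assuming the usual Clang lowering to __kmpc_cancel / __kmpc_cancellationpoint and OMP_CANCELLATION=true at run time:

#include <cstdio>
#include <omp.h>

int main() {
  #pragma omp parallel
  {
    if (omp_get_thread_num() == 0) {
      #pragma omp cancel parallel // request cancellation of the enclosing region
    }
    // threads that observe the cancellation here jump to the end of the region
    #pragma omp cancellation point parallel
    std::printf("thread %d did not observe cancellation\n", omp_get_thread_num());
  }
  return 0;
}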
kmp_runtime.cpp
    791  kmp_info_t *this_thr = parent_team->t.t_threads[master_tid];   in __kmp_reserve_threads() (local)
    881  int cg_nthreads = this_thr->th.th_cg_roots->cg_nthreads;   in __kmp_reserve_threads()
    882  int max_cg_threads = this_thr->th.th_cg_roots->cg_thread_limit;   in __kmp_reserve_threads()
   1142  kmp_info_t *this_thr;   in __kmp_serialized_parallel() (local)
   1156  this_thr = __kmp_threads[global_tid];   in __kmp_serialized_parallel()
   1157  serial_team = this_thr->th.th_serial_team;   in __kmp_serialized_parallel()
   1165  this_thr->th.th_task_team ==   in __kmp_serialized_parallel()
   1166  this_thr->th.th_team->t.t_task_team[this_thr->th.th_task_state]);   in __kmp_serialized_parallel()
   1167  KMP_DEBUG_ASSERT(serial_team->t.t_task_team[this_thr->th.th_task_state] ==   in __kmp_serialized_parallel()
   1171  global_tid, this_thr->th.th_task_team, this_thr->th.th_team));   in __kmp_serialized_parallel()
   [all …]
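__kmp_reserve_threads clamps a requested team size against limits such as the contention group's cg_thread_limit, while __kmp_serialized_parallel handles parallel regions that run on a single thread. A small program that plausibly drives both paths (which limit actually applies, and the resulting team sizes, are implementation dependent):

#include <cstdio>
#include <omp.h>

int main() {
  // A false if() clause serializes the region, taking the single-thread
  // (__kmp_serialized_parallel) path instead of forking a team.
  #pragma omp parallel if (0) num_threads(4)
  std::printf("serialized region runs on %d thread(s)\n", omp_get_num_threads());

  // A normal fork asks the runtime to reserve threads; it may grant fewer than
  // requested because of thread or contention-group limits.
  #pragma omp parallel num_threads(4)
  #pragma omp single
  std::printf("forked region got %d thread(s)\n", omp_get_num_threads());

  return 0;
}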
kmp_csupport.cpp
   367  kmp_info_t *this_thr = __kmp_threads[gtid];   in __kmpc_fork_teams() (local)
   382  this_thr->th.th_teams_microtask = microtask;   in __kmpc_fork_teams()
   383  this_thr->th.th_teams_level =   in __kmpc_fork_teams()
   384  this_thr->th.th_team->t.t_level; // AC: can be >0 on host   in __kmpc_fork_teams()
   387  kmp_team_t *parent_team = this_thr->th.th_team;   in __kmpc_fork_teams()
   398  if (this_thr->th.th_teams_size.nteams == 0) {   in __kmpc_fork_teams()
   401  KMP_DEBUG_ASSERT(this_thr->th.th_set_nproc >= 1);   in __kmpc_fork_teams()
   402  KMP_DEBUG_ASSERT(this_thr->th.th_teams_size.nteams >= 1);   in __kmpc_fork_teams()
   403  KMP_DEBUG_ASSERT(this_thr->th.th_teams_size.nth >= 1);   in __kmpc_fork_teams()
   417  KMP_DEBUG_ASSERT(this_thr->th.th_cg_roots);   in __kmpc_fork_teams()
   [all …]
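__kmpc_fork_teams records the teams microtask, nesting level and requested league geometry (th_teams_size) on the encountering thread before forking the league. With a compiler and libomp recent enough to support host teams constructs, the example below should reach this entry point; the team and thread counts actually granted are implementation dependent:

#include <cstdio>
#include <omp.h>

int main() {
  // Host teams construct: the num_teams/thread_limit clauses feed the
  // league geometry stored in th_teams_size.
  #pragma omp teams num_teams(2) thread_limit(4)
  {
    std::printf("team %d of %d\n", omp_get_team_num(), omp_get_num_teams());
  }
  return 0;
}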
kmp_wait_release.cpp
    15  void __kmp_wait_64(kmp_info_t *this_thr, kmp_flag_64<> *flag,   in __kmp_wait_64() (argument)
    19  this_thr, flag USE_ITT_BUILD_ARG(itt_sync_obj));   in __kmp_wait_64()
    22  this_thr, flag USE_ITT_BUILD_ARG(itt_sync_obj));   in __kmp_wait_64()
ompt-specific.h
   136  #define OMPT_REDUCTION_DECL(this_thr, gtid) \   (argument)
   137  ompt_data_t *my_task_data = OMPT_CUR_TASK_DATA(this_thr); \
   138  ompt_data_t *my_parallel_data = OMPT_CUR_TEAM_DATA(this_thr); \
   153  #define OMPT_REDUCTION_DECL(this_thr, gtid)   (argument)
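OMPT_REDUCTION_DECL extracts the current task's and team's ompt_data_t from this_thr for use in reduction callbacks, and expands to nothing when OMPT support is compiled out (the second definition at line 153). Those data handles end up in a tool attached through ompt_start_tool; a minimal tool skeleton is sketched below under the assumption of the documented OMPT 5.0 entry-point shape, with callback registration omitted:

#include <cstdio>
#include <omp-tools.h>

// Minimal OMPT tool skeleton: libomp looks for ompt_start_tool and, if the
// initializer returns nonzero, keeps the tool active for the whole run.
static int tool_initialize(ompt_function_lookup_t lookup, int initial_device_num,
                           ompt_data_t *tool_data) {
  (void)lookup; (void)initial_device_num; (void)tool_data;
  std::printf("OMPT tool attached\n");
  return 1; // nonzero keeps the tool active
}

static void tool_finalize(ompt_data_t *tool_data) { (void)tool_data; }

extern "C" ompt_start_tool_result_t *ompt_start_tool(unsigned int omp_version,
                                                     const char *runtime_version) {
  (void)omp_version; (void)runtime_version;
  static ompt_start_tool_result_t result = {&tool_initialize, &tool_finalize, {0}};
  return &result;
}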
kmp_tasking.cpp
    28  kmp_info_t *this_thr);
   439  void __kmp_pop_current_task_from_thread(kmp_info_t *this_thr) {   in __kmp_pop_current_task_from_thread() (argument)
   443  0, this_thr, this_thr->th.th_current_task,   in __kmp_pop_current_task_from_thread()
   444  this_thr->th.th_current_task->td_parent));   in __kmp_pop_current_task_from_thread()
   446  this_thr->th.th_current_task = this_thr->th.th_current_task->td_parent;   in __kmp_pop_current_task_from_thread()
   451  0, this_thr, this_thr->th.th_current_task,   in __kmp_pop_current_task_from_thread()
   452  this_thr->th.th_current_task->td_parent));   in __kmp_pop_current_task_from_thread()
   461  void __kmp_push_current_task_to_thread(kmp_info_t *this_thr, kmp_team_t *team,   in __kmp_push_current_task_to_thread() (argument)
   468  tid, this_thr, this_thr->th.th_current_task,   in __kmp_push_current_task_to_thread()
   471  KMP_DEBUG_ASSERT(this_thr != NULL);   in __kmp_push_current_task_to_thread()
   [all …]
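__kmp_push_current_task_to_thread and __kmp_pop_current_task_from_thread maintain th_current_task as a stack linked through td_parent, which is what keeps nested and recursive tasks straight. A user-level recursive-task example that generates exactly this push/pop traffic:

#include <cstdio>
#include <omp.h>

// Recursive tasks: each task the runtime starts and finishes pushes/pops the
// thread's current-task pointer along its parent links.
static int fib(int n) {
  if (n < 2)
    return n;
  int a, b;
  #pragma omp task shared(a)
  a = fib(n - 1);
  #pragma omp task shared(b)
  b = fib(n - 2);
  #pragma omp taskwait
  return a + b;
}

int main() {
  int result = 0;
  #pragma omp parallel
  #pragma omp single
  result = fib(10);
  std::printf("fib(10) = %d\n", result); // 55
  return 0;
}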
kmp_alloc.cpp
   1893  void *___kmp_fast_allocate(kmp_info_t *this_thr, size_t size KMP_SRC_LOC_DECL) {   in ___kmp_fast_allocate() (argument)
   1903  __kmp_gtid_from_thread(this_thr), (int)size KMP_SRC_LOC_PARM));   in ___kmp_fast_allocate()
   1924  ptr = this_thr->th.th_free_lists[index].th_free_list_self;   in ___kmp_fast_allocate()
   1927  this_thr->th.th_free_lists[index].th_free_list_self = *((void **)ptr);   in ___kmp_fast_allocate()
   1929  this_thr ==   in ___kmp_fast_allocate()
   1934  ptr = TCR_SYNC_PTR(this_thr->th.th_free_lists[index].th_free_list_sync);   in ___kmp_fast_allocate()
   1940  &this_thr->th.th_free_lists[index].th_free_list_sync, ptr, nullptr)) {   in ___kmp_fast_allocate()
   1942  ptr = TCR_SYNC_PTR(this_thr->th.th_free_lists[index].th_free_list_sync);   in ___kmp_fast_allocate()
   1946  this_thr->th.th_free_lists[index].th_free_list_self = *((void **)ptr);   in ___kmp_fast_allocate()
   1948  this_thr ==   in ___kmp_fast_allocate()
   [all …]
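___kmp_fast_allocate serves small allocations from per-thread free lists: th_free_list_self is popped by the owner with no synchronization, and th_free_list_sync collects blocks freed by other threads, to be detached in one atomic swap when the private list runs dry. An illustrative stand-alone model of that two-list scheme (block, free_lists and the function names are invented for the sketch):

#include <atomic>

struct block { block *next; };

struct free_lists {
  block *self = nullptr;              // owner-only list, no synchronization needed
  std::atomic<block *> sync{nullptr}; // written by foreign threads
};

block *fast_allocate(free_lists &fl) {
  if (!fl.self) {
    fl.self = fl.sync.exchange(nullptr, std::memory_order_acquire); // grab whole list
    if (!fl.self)
      return nullptr; // real code falls back to the regular allocator here
  }
  block *b = fl.self;
  fl.self = b->next;
  return b;
}

void fast_free_remote(free_lists &fl, block *b) {
  block *head = fl.sync.load(std::memory_order_relaxed);
  do {
    b->next = head; // push onto the owner's shared list
  } while (!fl.sync.compare_exchange_weak(head, b, std::memory_order_release,
                                          std::memory_order_relaxed));
}

int main() {
  static block pool[4];
  free_lists fl;
  for (block &b : pool)
    fast_free_remote(fl, &b);          // simulate frees from other threads
  while (fast_allocate(fl) != nullptr) // owner drains the refilled private list
    ;
  return 0;
}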
z_Windows_NT_util.cpp
   958  kmp_info_t *this_thr = (kmp_info_t *)arg;   in __kmp_launch_worker() (local)
   961  gtid = this_thr->th.th_info.ds.ds_gtid;   in __kmp_launch_worker()
   987  KMP_FSYNC_RELEASING(&this_thr->th.th_info.ds.ds_alive);   in __kmp_launch_worker()
   988  this_thr->th.th_info.ds.ds_thread_id = GetCurrentThreadId();   in __kmp_launch_worker()
   989  TCW_4(this_thr->th.th_info.ds.ds_alive, TRUE);   in __kmp_launch_worker()
   993  TCW_PTR(this_thr->th.th_info.ds.ds_stackbase, &stack_data);   in __kmp_launch_worker()
   994  KMP_ASSERT(this_thr->th.th_info.ds.ds_stackgrow == FALSE);   in __kmp_launch_worker()
   995  __kmp_check_stack_overlap(this_thr);   in __kmp_launch_worker()
   998  exit_val = __kmp_launch_thread(this_thr);   in __kmp_launch_worker()
   999  KMP_FSYNC_RELEASING(&this_thr->th.th_info.ds.ds_alive);   in __kmp_launch_worker()
   [all …]
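__kmp_launch_worker is the Windows thread entry wrapper: it records the worker's thread id, marks the descriptor alive, checks the stack, runs __kmp_launch_thread, and clears the alive marker on the way out. A much-reduced Win32 sketch of that launch and handshake pattern (worker_desc and launch_worker are illustrative, not runtime names; builds only on Windows):

#include <windows.h>
#include <process.h>
#include <cstdio>

struct worker_desc {
  volatile LONG alive = FALSE;
  DWORD thread_id = 0;
};

static unsigned __stdcall launch_worker(void *arg) {
  worker_desc *desc = static_cast<worker_desc *>(arg);
  desc->thread_id = GetCurrentThreadId();   // mirrors the ds_thread_id setup
  InterlockedExchange(&desc->alive, TRUE);  // mirrors the ds_alive store
  std::printf("worker %lu running\n", desc->thread_id);
  InterlockedExchange(&desc->alive, FALSE); // cleared again before exit
  return 0;
}

int main() {
  worker_desc desc;
  HANDLE h = reinterpret_cast<HANDLE>(
      _beginthreadex(nullptr, 0, launch_worker, &desc, 0, nullptr));
  WaitForSingleObject(h, INFINITE);
  CloseHandle(h);
  return 0;
}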
kmp_lock.cpp
   1039  static void __kmp_dump_queuing_lock(kmp_info_t *this_thr, kmp_int32 gtid,   in __kmp_dump_queuing_lock() (argument)
   1057  gtid + 1, this_thr->th.th_spin_here,   in __kmp_dump_queuing_lock()
   1058  this_thr->th.th_next_waiting, head_id, tail_id);   in __kmp_dump_queuing_lock()
   1090  kmp_info_t *this_thr = __kmp_thread_from_gtid(gtid);   in __kmp_acquire_queuing_lock_timed_template() (local)
   1104  KMP_DEBUG_ASSERT(this_thr != NULL);   in __kmp_acquire_queuing_lock_timed_template()
   1105  spin_here_p = &this_thr->th.th_spin_here;   in __kmp_acquire_queuing_lock_timed_template()
   1110  __kmp_dump_queuing_lock(this_thr, gtid, lck, *head_id_p, *tail_id_p);   in __kmp_acquire_queuing_lock_timed_template()
   1111  if (this_thr->th.th_next_waiting != 0)   in __kmp_acquire_queuing_lock_timed_template()
   1112  __kmp_dump_queuing_lock(this_thr, gtid, lck, *head_id_p, *tail_id_p);   in __kmp_acquire_queuing_lock_timed_template()
   1115  KMP_DEBUG_ASSERT(this_thr->th.th_next_waiting == 0);   in __kmp_acquire_queuing_lock_timed_template()
   [all …]
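The queuing lock has each waiter spin on its own th_spin_here flag and chains waiters through th_next_waiting and the lock's head/tail ids, which is the classic MCS queue-lock idea. A stand-alone MCS-style sketch for comparison (illustrative only; the kmp lock queues thread ids rather than node pointers and has timed/adaptive variants):

#include <atomic>
#include <thread>
#include <vector>

struct qnode {
  std::atomic<qnode *> next{nullptr};
  std::atomic<bool> spin{false};
};

struct mcs_lock {
  std::atomic<qnode *> tail{nullptr};

  void acquire(qnode *me) {
    me->next.store(nullptr, std::memory_order_relaxed);
    me->spin.store(true, std::memory_order_relaxed);
    qnode *prev = tail.exchange(me, std::memory_order_acq_rel);
    if (prev) {                       // queue was non-empty: link in, spin locally
      prev->next.store(me, std::memory_order_release);
      while (me->spin.load(std::memory_order_acquire))
        std::this_thread::yield();
    }
  }

  void release(qnode *me) {
    qnode *succ = me->next.load(std::memory_order_acquire);
    if (!succ) {
      qnode *expected = me;           // no visible successor: try to empty the queue
      if (tail.compare_exchange_strong(expected, nullptr,
                                       std::memory_order_acq_rel))
        return;
      while (!(succ = me->next.load(std::memory_order_acquire)))
        std::this_thread::yield();    // successor is mid-enqueue, wait for its link
    }
    succ->spin.store(false, std::memory_order_release); // hand the lock off
  }
};

int main() {
  mcs_lock lock;
  long counter = 0;
  std::vector<std::thread> pool;
  for (int t = 0; t < 4; ++t)
    pool.emplace_back([&] {
      qnode me;
      for (int i = 0; i < 10000; ++i) {
        lock.acquire(&me);
        ++counter;                    // protected by the queuing lock
        lock.release(&me);
      }
    });
  for (auto &t : pool)
    t.join();
  return counter == 4 * 10000 ? 0 : 1;
}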
kmp.h
   3279  extern void *___kmp_fast_allocate(kmp_info_t *this_thr,
   3281  extern void ___kmp_fast_free(kmp_info_t *this_thr, void *ptr KMP_SRC_LOC_DECL);
   3282  extern void __kmp_free_fast_memory(kmp_info_t *this_thr);
   3283  extern void __kmp_initialize_fast_memory(kmp_info_t *this_thr);
   3284  #define __kmp_fast_allocate(this_thr, size) \   (argument)
   3285  ___kmp_fast_allocate((this_thr), (size)KMP_SRC_LOC_CURR)
   3286  #define __kmp_fast_free(this_thr, ptr) \   (argument)
   3287  ___kmp_fast_free((this_thr), (ptr)KMP_SRC_LOC_CURR)
   3388  extern void __kmp_wait_64(kmp_info_t *this_thr, kmp_flag_64<> *flag,
   3573  kmp_info_t *this_thr,
   [all …]
kmp_ftn_entry.h
   358  kmp_info_t *this_thr = __kmp_threads[__kmp_entry_gtid()];   in FTN_CONTROL_TOOL()
   359  ompt_task_info_t *parent_task_info = OMPT_CUR_TASK_INFO(this_thr);   in FTN_CONTROL_TOOL()
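FTN_CONTROL_TOOL backs the omp_control_tool() API entry, forwarding the command plus the caller's OMPT task data to an attached tool. From user code the call is simply:

#include <cstdio>
#include <omp.h>

int main() {
  // Forwarded to an attached OMPT tool; without a tool the call reports
  // omp_control_tool_notool and is otherwise a no-op.
  int rc = omp_control_tool(omp_control_tool_flush, 0, nullptr);
  std::printf("omp_control_tool returned %d\n", rc);
  return 0;
}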
kmp_gsupport.cpp
   196  kmp_info_t *this_thr = __kmp_threads[gtid];   in KMP_EXPAND_NAME() (local)
   197  kmp_team_t *team = this_thr->th.th_team;   in KMP_EXPAND_NAME()
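kmp_gsupport.cpp provides the GOMP_* compatibility entry points (their names are generated through KMP_EXPAND_NAME), so objects compiled against libgomp's ABI can run on top of libomp. Assuming GCC's usual lowering of a parallel region to GOMP_parallel, a minimal program that enters through this shim:

#include <cstdio>
#include <omp.h>

int main() {
  // Built with g++ -fopenmp but linked against LLVM's libomp, this region
  // enters through the GOMP compatibility layer in kmp_gsupport.cpp.
  #pragma omp parallel
  std::printf("hello from thread %d\n", omp_get_thread_num());
  return 0;
}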