Lines matching refs: kmp_int32
33 static void __kmp_bottom_half_finish_proxy(kmp_int32 gtid, kmp_task_t *ptask);
44 static void __kmp_trace_task_stack(kmp_int32 gtid, in __kmp_trace_task_stack()
49 kmp_int32 entries = task_stack->ts_entries; in __kmp_trace_task_stack()
98 static void __kmp_init_task_stack(kmp_int32 gtid, in __kmp_init_task_stack()
119 static void __kmp_free_task_stack(kmp_int32 gtid, in __kmp_free_task_stack()
148 static void __kmp_push_task_stack(kmp_int32 gtid, kmp_info_t *thread, in __kmp_push_task_stack()
207 static void __kmp_pop_task_stack(kmp_int32 gtid, kmp_info_t *thread, in __kmp_pop_task_stack()
253 static bool __kmp_task_is_allowed(int gtid, const kmp_int32 is_constrained, in __kmp_task_is_allowed()
265 kmp_int32 level = current->td_level; in __kmp_task_is_allowed()
300 kmp_int32 size = TASK_DEQUE_SIZE(thread_data->td); in __kmp_realloc_task_deque()
302 kmp_int32 new_size = 2 * size; in __kmp_realloc_task_deque()
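
The entries at 300-302 show __kmp_realloc_task_deque doubling a thread's task deque when it fills. Below is a minimal, self-contained sketch of that growth policy only, assuming a simple power-of-two ring buffer of void* rather than the runtime's actual kmp_thread_data_t layout and td_deque_lock:

    #include <cstdint>
    #include <utility>
    #include <vector>

    // Simplified stand-in for the per-thread task deque; the real runtime
    // stores kmp_taskdata_t* entries and guards growth with td_deque_lock.
    struct toy_deque {
      std::vector<void *> slots; // capacity is assumed to stay a power of two
      int32_t head = 0;          // index of the oldest entry
      int32_t ntasks = 0;        // number of live entries

      explicit toy_deque(int32_t initial_size) : slots(initial_size) {}

      // Mirror of the doubling policy: allocate 2 * size, copy the live
      // entries starting at head, then reset head to 0.
      void grow() {
        int32_t size = static_cast<int32_t>(slots.size());
        int32_t new_size = 2 * size;
        std::vector<void *> bigger(new_size);
        for (int32_t i = 0; i < ntasks; ++i)
          bigger[i] = slots[(head + i) & (size - 1)];
        slots = std::move(bigger);
        head = 0;
      }
    };

    int main() {
      toy_deque d(4);
      d.grow(); // capacity 4 -> 8
      return static_cast<int>(d.slots.size()) == 8 ? 0 : 1;
    }
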
325 static kmp_int32 __kmp_push_task(kmp_int32 gtid, kmp_task_t *task) { in __kmp_push_task()
329 kmp_int32 tid = __kmp_tid_from_gtid(gtid); in __kmp_push_task()
338 kmp_int32 counter = 1 + KMP_ATOMIC_INC(&taskdata->td_untied_count); in __kmp_push_task()
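
The counter updates at 338 (and again at 612 and 830) use the KMP_ATOMIC_INC / KMP_ATOMIC_DEC idiom, which, like std::atomic fetch_add/fetch_sub, returns the value before the update, so the code adds or subtracts one to get the new count. An illustration of the same idiom with std::atomic, not the runtime's own macros:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    int main() {
      std::atomic<int32_t> untied_count{0};

      // KMP_ATOMIC_INC-style: fetch_add returns the old value, so the
      // post-increment value is old + 1.
      int32_t after_inc = 1 + untied_count.fetch_add(1);
      assert(after_inc == 1);

      // KMP_ATOMIC_DEC-style: fetch_sub returns the old value, so the
      // post-decrement value is old - 1.
      int32_t after_dec = untied_count.fetch_sub(1) - 1;
      assert(after_dec == 0);
      return 0;
    }
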
497 static void __kmp_task_start(kmp_int32 gtid, kmp_task_t *task, in __kmp_task_start()
561 kmp_int32 gtid) { in __ompt_task_start()
598 static void __kmpc_omp_task_begin_if0_template(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_begin_if0_template()
612 kmp_int32 counter = 1 + KMP_ATOMIC_INC(&taskdata->td_untied_count); in __kmpc_omp_task_begin_if0_template()
649 static void __kmpc_omp_task_begin_if0_ompt(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_begin_if0_ompt()
664 void __kmpc_omp_task_begin_if0(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_begin_if0()
681 void __kmpc_omp_task_begin(ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *task) { in __kmpc_omp_task_begin()
702 static void __kmp_free_task(kmp_int32 gtid, kmp_taskdata_t *taskdata, in __kmp_free_task()
734 static void __kmp_free_task_and_ancestors(kmp_int32 gtid, in __kmp_free_task_and_ancestors()
739 kmp_int32 team_serial = in __kmp_free_task_and_ancestors()
744 kmp_int32 children = KMP_ATOMIC_DEC(&taskdata->td_allocated_child_tasks) - 1; in __kmp_free_task_and_ancestors()
772 RCAST(kmp_int32 *, &taskdata->td_flags), in __kmp_free_task_and_ancestors()
773 *RCAST(kmp_int32 *, &flags_old), in __kmp_free_task_and_ancestors()
774 *RCAST(kmp_int32 *, &flags_new))) { in __kmp_free_task_and_ancestors()
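
The RCAST(kmp_int32 *, ...) pattern at 772-774 (and again at 1119-1121) reinterprets the 32-bit task flags bitfield as a plain kmp_int32 word so that all flags can be swapped in one compare-and-store. A rough stand-alone analogue, assuming C++20 std::bit_cast and a toy bitfield instead of kmp_tasking_flags_t:

    #include <atomic>
    #include <bit>
    #include <cstdint>

    // Toy stand-in for kmp_tasking_flags_t: a 32-bit bitfield whose members
    // must all be updated in one atomic step.
    struct toy_flags {
      unsigned complete : 1;
      unsigned freed : 1;
      unsigned reserved : 30;
    };
    static_assert(sizeof(toy_flags) == 4, "flags must pack into one 32-bit word");

    // Shared flags word, stored as its raw 32-bit representation.
    std::atomic<uint32_t> shared_flags{0};

    // Flip 'complete' from 0 to 1 only if nobody else changed the word in the
    // meantime; mirrors the KMP_COMPARE_AND_STORE_ACQ32 usage in shape only.
    bool try_mark_complete() {
      toy_flags old_f = std::bit_cast<toy_flags>(shared_flags.load());
      toy_flags new_f = old_f;
      new_f.complete = 1;
      uint32_t expected = std::bit_cast<uint32_t>(old_f);
      uint32_t desired = std::bit_cast<uint32_t>(new_f);
      return shared_flags.compare_exchange_strong(expected, desired,
                                                  std::memory_order_acquire);
    }

    int main() { return try_mark_complete() ? 0 : 1; }
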
806 static void __kmp_task_finish(kmp_int32 gtid, kmp_task_t *task, in __kmp_task_finish()
812 kmp_int32 children = 0; in __kmp_task_finish()
830 kmp_int32 counter = KMP_ATOMIC_DEC(&taskdata->td_untied_count) - 1; in __kmp_task_finish()
972 kmp_int32 gtid, in __kmpc_omp_task_complete_if0_template()
997 void __kmpc_omp_task_complete_if0_ompt(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_complete_if0_ompt()
1008 void __kmpc_omp_task_complete_if0(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_complete_if0()
1022 void __kmpc_omp_task_complete(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_complete()
1119 if (KMP_COMPARE_AND_STORE_ACQ32(RCAST(kmp_int32 *, &task->td_flags), in __kmp_finish_implicit_task()
1120 *RCAST(kmp_int32 *, &flags_old), in __kmp_finish_implicit_task()
1121 *RCAST(kmp_int32 *, &flags_new))) { in __kmp_finish_implicit_task()
1167 kmp_task_t *__kmp_task_alloc(ident_t *loc_ref, kmp_int32 gtid, in __kmp_task_alloc()
1183 gtid, loc_ref, *((kmp_int32 *)flags), sizeof_kmp_task_t, in __kmp_task_alloc()
1228 kmp_int32 tid = thread->th.th_info.ds.ds_tid; in __kmp_task_alloc()
1368 kmp_task_t *__kmpc_omp_task_alloc(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_alloc()
1369 kmp_int32 flags, size_t sizeof_kmp_task_t, in __kmpc_omp_task_alloc()
1392 kmp_task_t *__kmpc_omp_target_task_alloc(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_target_task_alloc()
1393 kmp_int32 flags, in __kmpc_omp_target_task_alloc()
1415 kmp_int32
1416 __kmpc_omp_reg_task_with_affinity(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_reg_task_with_affinity()
1417 kmp_task_t *new_task, kmp_int32 naffins, in __kmpc_omp_reg_task_with_affinity()
1427 static void __kmp_invoke_task(kmp_int32 gtid, kmp_task_t *task, in __kmp_invoke_task()
1542 kmp_int32 kmp_itt_count_task = in __kmp_invoke_task()
1609 kmp_int32 __kmpc_omp_task_parts(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task_parts()
1667 kmp_int32 __kmp_omp_task(kmp_int32 gtid, kmp_task_t *new_task, in __kmp_omp_task()
1698 kmp_int32 __kmpc_omp_task(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_task()
1700 kmp_int32 res; in __kmpc_omp_task()
1765 kmp_int32 __kmp_omp_taskloop_task(ident_t *loc_ref, kmp_int32 gtid, in __kmp_omp_taskloop_task()
1767 kmp_int32 res; in __kmp_omp_taskloop_task()
1808 static kmp_int32 __kmpc_omp_taskwait_template(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_taskwait_template()
1916 static kmp_int32 __kmpc_omp_taskwait_ompt(ident_t *loc_ref, kmp_int32 gtid, in __kmpc_omp_taskwait_ompt()
1926 kmp_int32 __kmpc_omp_taskwait(ident_t *loc_ref, kmp_int32 gtid) { in __kmpc_omp_taskwait()
1938 kmp_int32 __kmpc_omp_taskyield(ident_t *loc_ref, kmp_int32 gtid, int end_part) { in __kmpc_omp_taskyield()
2107 kmp_int32 nth = thread->th.th_team_nproc; in __kmp_task_reduction_init()
2222 kmp_int32 nth = thread->th.th_team_nproc; in __kmpc_task_reduction_get_th_data()
2231 kmp_int32 num = tg->reduce_num_data; in __kmpc_task_reduction_get_th_data()
2232 kmp_int32 tid = thread->th.th_info.ds.ds_tid; in __kmpc_task_reduction_get_th_data()
2278 kmp_int32 nth = th->th.th_team_nproc; in __kmp_task_reduction_fini()
2281 kmp_int32 num = tg->reduce_num_data; in __kmp_task_reduction_fini()
2328 kmp_int32 nth = thr->th.th_team_nproc; in __kmp_task_reduction_modifier_init()
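
The reduction helpers at 2107-2328 repeatedly pull out nth (team size), num (number of reduction items) and tid (the caller's slot): each item keeps one privately initialized copy per thread, and a thread indexes its own copy by tid. A simplified sketch of that indexing, assuming plain doubles instead of the runtime's type-erased reduction records:

    #include <cstddef>
    #include <vector>

    // One reduction item: nth private copies, one per thread in the team.
    struct toy_reduce_item {
      std::vector<double> per_thread; // size == team size (nth)
      explicit toy_reduce_item(std::size_t nth) : per_thread(nth, 0.0) {}
    };

    // __kmpc_task_reduction_get_th_data-style lookup: given the calling
    // thread's tid and the item index, hand back that thread's private copy.
    double *get_th_data(std::vector<toy_reduce_item> &items, std::size_t item,
                        std::size_t tid) {
      return &items[item].per_thread[tid];
    }

    int main() {
      std::size_t nth = 4; // team size
      std::vector<toy_reduce_item> items(2, toy_reduce_item(nth));
      *get_th_data(items, 0, 3) += 1.0; // thread 3 updates its copy of item 0
      return 0;
    }
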
2597 static kmp_task_t *__kmp_remove_my_task(kmp_info_t *thread, kmp_int32 gtid, in __kmp_remove_my_task()
2599 kmp_int32 is_constrained) { in __kmp_remove_my_task()
2669 static kmp_task_t *__kmp_steal_task(kmp_info_t *victim_thr, kmp_int32 gtid, in __kmp_steal_task()
2671 std::atomic<kmp_int32> *unfinished_threads, in __kmp_steal_task()
2673 kmp_int32 is_constrained) { in __kmp_steal_task()
2678 kmp_int32 target; in __kmp_steal_task()
2679 kmp_int32 victim_tid; in __kmp_steal_task()
2772 kmp_int32 count; in __kmp_steal_task()
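
__kmp_steal_task (2669-2772) takes work from the head of a victim thread's deque while the owner removes work from the tail of its own deque (__kmp_remove_my_task, 2597-2599). A highly simplified, lock-based sketch of that owner/thief split, assuming a mutex instead of td_deque_lock and ignoring the constrained-task and bookkeeping checks:

    #include <deque>
    #include <mutex>
    #include <optional>

    struct toy_task { int id; };

    struct toy_thread_deque {
      std::mutex lock;            // stand-in for td_deque_lock
      std::deque<toy_task> tasks; // owner pushes and pops at the back

      // Owner path (__kmp_remove_my_task-like): newest task first.
      std::optional<toy_task> pop_own() {
        std::lock_guard<std::mutex> g(lock);
        if (tasks.empty()) return std::nullopt;
        toy_task t = tasks.back();
        tasks.pop_back();
        return t;
      }

      // Thief path (__kmp_steal_task-like): oldest task first.
      std::optional<toy_task> steal() {
        std::lock_guard<std::mutex> g(lock);
        if (tasks.empty()) return std::nullopt;
        toy_task t = tasks.front();
        tasks.pop_front();
        return t;
      }
    };

    int main() {
      toy_thread_deque victim;
      victim.tasks.push_back({1});
      victim.tasks.push_back({2});
      auto stolen = victim.steal(); // thief gets task 1 (oldest)
      auto own = victim.pop_own();  // owner gets task 2 (newest)
      return (stolen && own) ? 0 : 1;
    }
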
2809 kmp_info_t *thread, kmp_int32 gtid, C *flag, int final_spin, in __kmp_execute_tasks_template()
2811 kmp_int32 is_constrained) { in __kmp_execute_tasks_template()
2817 std::atomic<kmp_int32> *unfinished_threads; in __kmp_execute_tasks_template()
2818 kmp_int32 nthreads, victim_tid = -2, use_own_tasks = 1, new_victim = 0, in __kmp_execute_tasks_template()
2971 kmp_int32 count; in __kmp_execute_tasks_template()
3016 kmp_info_t *thread, kmp_int32 gtid, kmp_flag_32<C, S> *flag, int final_spin, in __kmp_execute_tasks_32()
3018 kmp_int32 is_constrained) { in __kmp_execute_tasks_32()
3026 kmp_info_t *thread, kmp_int32 gtid, kmp_flag_64<C, S> *flag, int final_spin, in __kmp_execute_tasks_64()
3028 kmp_int32 is_constrained) { in __kmp_execute_tasks_64()
3035 kmp_info_t *thread, kmp_int32 gtid, kmp_flag_oncore *flag, int final_spin, in __kmp_execute_tasks_oncore()
3037 kmp_int32 is_constrained) { in __kmp_execute_tasks_oncore()
3044 __kmp_execute_tasks_32<false, false>(kmp_info_t *, kmp_int32,
3046 int *USE_ITT_BUILD_ARG(void *), kmp_int32);
3048 template int __kmp_execute_tasks_64<false, true>(kmp_info_t *, kmp_int32,
3052 kmp_int32);
3054 template int __kmp_execute_tasks_64<true, false>(kmp_info_t *, kmp_int32,
3058 kmp_int32);
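
The entries at 3044-3058 are explicit instantiation definitions: __kmp_execute_tasks_32/64 are templates over two bools that select the matching kmp_flag_32/64<C, S> flag type, and the source file pre-instantiates the combinations other translation units link against. A minimal illustration of the same C++ mechanism with a hypothetical template (execute_tasks_demo is not part of the runtime):

    // Hypothetical function template with two bool parameters, analogous in
    // shape to __kmp_execute_tasks_32<C, S>.
    template <bool C, bool S> int execute_tasks_demo(int gtid) {
      return (C ? 1 : 0) + (S ? 2 : 0) + gtid;
    }

    // Explicit instantiation definitions: force the compiler to emit these
    // specializations here so other translation units can link against them.
    template int execute_tasks_demo<false, false>(int);
    template int execute_tasks_demo<false, true>(int);
    template int execute_tasks_demo<true, false>(int);

    int main() { return execute_tasks_demo<false, false>(0); }
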
3223 kmp_int32 nthreads, maxthreads; in __kmp_realloc_task_threads_data()
3673 static bool __kmp_give_task(kmp_info_t *thread, kmp_int32 tid, kmp_task_t *task, in __kmp_give_task()
3674 kmp_int32 pass) { in __kmp_give_task()
3786 kmp_int32 children = 0; in __kmp_second_top_half_finish_proxy()
3797 static void __kmp_bottom_half_finish_proxy(kmp_int32 gtid, kmp_task_t *ptask) { in __kmp_bottom_half_finish_proxy()
3822 void __kmpc_proxy_task_completed(kmp_int32 gtid, kmp_task_t *ptask) { in __kmpc_proxy_task_completed()
3863 kmp_int32 nthreads = team->t.t_nproc; in __kmpc_proxy_task_completed_ooo()
3868 kmp_int32 start_k = 0; in __kmpc_proxy_task_completed_ooo()
3869 kmp_int32 pass = 1; in __kmpc_proxy_task_completed_ooo()
3870 kmp_int32 k = start_k; in __kmpc_proxy_task_completed_ooo()
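
__kmpc_proxy_task_completed_ooo (3863-3870) hands a finished proxy task to some thread's deque by round-robining k over the team starting at start_k and doubling pass after every full unsuccessful lap, so the acceptance test in __kmp_give_task becomes progressively more permissive. A schematic reimplementation of just that loop, with a stand-in give_task predicate that only approximates the real capacity/realloc check:

    #include <cstdint>
    #include <vector>

    // Stand-in for __kmp_give_task: accept the task only if the target deque
    // holds fewer than capacity * pass entries (the real check may realloc).
    static bool give_task_demo(std::vector<int> &deque_sizes, int32_t k,
                               int32_t pass, int32_t capacity) {
      if (deque_sizes[k] >= capacity * pass)
        return false; // deque "full" for this pass; try the next thread
      ++deque_sizes[k];
      return true;
    }

    int main() {
      int32_t nthreads = 4, capacity = 2;
      std::vector<int> deque_sizes = {2, 2, 2, 2}; // every deque starts full

      int32_t start_k = 0;
      int32_t pass = 1;
      int32_t k = start_k;
      do {
        k = (k + 1) % nthreads; // linear scan over the team
        if (k == start_k)
          pass = pass << 1;     // full lap with no taker: relax the test
      } while (!give_task_demo(deque_sizes, k, pass, capacity));
      return 0;
    }
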
4025 typedef void (*p_task_dup_t)(kmp_task_t *, kmp_task_t *, kmp_int32);
4060 kmp_int32 *lb = RCAST(kmp_int32 *, task->shareds); in get_lb()
4081 kmp_int32 *ub = RCAST(kmp_int32 *, task->shareds) + 1; in get_ub()
4168 kmp_int32 lastpriv = 0; in __kmp_taskloop_linear()
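
The lastpriv flag at 4168 marks exactly one generated taskloop chunk, the one containing the final iteration, so only that task performs the lastprivate copy-out; the task-duplication callback (see the p_task_dup_t typedef at 4025) receives it as its third argument. A simplified sketch of how a linear splitter might set that flag, assuming even chunking rather than the runtime's actual bounds arithmetic, with task_dup_demo as a hypothetical stand-in for the callback:

    #include <cstdint>
    #include <cstdio>

    // Toy stand-in for the task duplication hook (p_task_dup_t): the third
    // argument tells the new task whether it owns the lastprivate copy-out.
    static void task_dup_demo(int32_t chunk_lb, int32_t chunk_ub,
                              int32_t lastpriv) {
      std::printf("chunk [%d, %d] lastpriv=%d\n", chunk_lb, chunk_ub, lastpriv);
    }

    int main() {
      int32_t lb = 0, ub = 9, num_tasks = 4;      // 10 iterations, 4 chunks
      int32_t chunk = (ub - lb + 1) / num_tasks;  // base iterations per chunk
      int32_t extras = (ub - lb + 1) % num_tasks; // first 'extras' get one more

      int32_t next_lb = lb;
      for (int32_t i = 0; i < num_tasks; ++i) {
        int32_t size = chunk + (i < extras ? 1 : 0);
        int32_t next_ub = next_lb + size - 1;
        int32_t lastpriv = (i == num_tasks - 1) ? 1 : 0; // only the final chunk
        task_dup_demo(next_lb, next_ub, lastpriv);
        next_lb = next_ub + 1;
      }
      return 0;
    }
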