Lines matching refs:kmp_int32 (from the LLVM OpenMP runtime header kmp.h)
228 kmp_int32 reserved_1; /**< might be used in Fortran; see above */
229 kmp_int32 flags; /**< also f.flags; KMP_IDENT_xxx flags; KMP_IDENT_KMPC
231 kmp_int32 reserved_2; /**< not really used in Fortran any more; see above */
236 kmp_int32 reserved_3; /**< source[4] in Fortran, do not use for C++ */
242 kmp_int32 get_openmp_version() {
349 enum sched_type : kmp_int32 {
453 (s = (enum sched_type)((kmp_int32)s | (kmp_int32)m))
1477 typedef kmp_int32 kmp_critical_name[8];
1488 typedef void (*kmpc_micro)(kmp_int32 *global_tid, kmp_int32 *bound_tid, ...);
1489 typedef void (*kmpc_micro_bound)(kmp_int32 *bound_tid, kmp_int32 *bound_nth,
1614 kmp_int32 num_active;
1636 kmp_int32 count;
1637 kmp_int32 ub;
1639 kmp_int32 lb;
1640 kmp_int32 st;
1641 kmp_int32 tc;
1642 kmp_int32 static_steal_counter; /* for static_steal only; maybe better to put
1652 kmp_int32 parm1; // structures in kmp_dispatch.cpp. This should
1653 kmp_int32 parm2; // make no real change at least while padding is off.
1654 kmp_int32 parm3;
1655 kmp_int32 parm4;
1664 kmp_int32 last_upper;
1704 kmp_int32 lb;
1705 kmp_int32 ub;
1706 kmp_int32 st;
1707 kmp_int32 tc;
1709 kmp_int32 parm1;
1710 kmp_int32 parm2;
1711 kmp_int32 parm3;
1712 kmp_int32 parm4;
1714 kmp_int32 count;
1719 kmp_int32 last_upper;
1752 kmp_int32 ordered_bumped;
1754 kmp_int32 ordered_dummy[KMP_MAX_ORDERED - 3];
1757 kmp_int32 type_size; /* the size of types in private_info */
1759 kmp_int32 hier_id;
1772 kmp_int32 ordered_dummy[KMP_MAX_ORDERED - 1];
1791 volatile kmp_int32 doacross_buf_idx; // teamwise index
1793 kmp_int32 doacross_num_done; // count finished threads
1815 kmp_int32 th_disp_index;
1816 kmp_int32 th_doacross_buf_idx; // thread's doacross buffer index
1907 kmp_int32 default_device; /* internal control for default device */
1930 kmp_int32 parent_tid;
1931 kmp_int32 old_tid;
2154 extern kmp_int32 __kmp_default_device; // Set via OMP_DEFAULT_DEVICE if
2157 extern kmp_int32 __kmp_max_task_priority;
2177 typedef kmp_int32 (*kmp_routine_entry_t)(kmp_int32, void *);
2180 kmp_int32 priority; /**< priority specified by user for the task */
2194 kmp_int32 part_id; /**< part id for the task */
2207 std::atomic<kmp_int32> count; // number of allocated and incomplete tasks
2208 std::atomic<kmp_int32>
2213 kmp_int32 reduce_num_data; // number of data items to reduce
2245 kmp_int32 mtx_num_locks; /* number of locks in mtx_locks array */
2250 std::atomic<kmp_int32> npredecessors;
2251 std::atomic<kmp_int32> nrefs;
2265 kmp_int32 last_flag;
2284 kmp_int32 reserved : 30;
2313 kmp_int32 ts_entries; // number of entries on the stack
2352 kmp_int32 td_task_id; /* id, assigned by debugger */
2358 kmp_int32 td_level; /* task nesting level */
2359 std::atomic<kmp_int32> td_untied_count; // untied task active parts counter
2364 kmp_int32 td_taskwait_thread; /* gtid + 1 of thread encountered taskwait */
2367 KMP_ALIGN_CACHE std::atomic<kmp_int32>
2370 std::atomic<kmp_int32>
2379 kmp_int32 td_size_alloc; // The size of task structure, including shareds etc.
2382 kmp_int32 td_size_loop_bounds;
2406 kmp_int32 td_deque_size; // Size of deque
2409 kmp_int32 td_deque_ntasks; // Number of tasks in deque
2411 kmp_int32 td_deque_last_stolen; // Thread number of last successful steal
2439 kmp_int32 tt_found_tasks; /* Have we found tasks and queued them while
2442 kmp_int32 tt_nproc; /* #threads in team */
2443 kmp_int32 tt_max_threads; // # entries allocated for threads_data array
2444 kmp_int32 tt_found_proxy_tasks; // found proxy tasks since last barrier
2445 kmp_int32 tt_untied_task_encountered;
2448 std::atomic<kmp_int32> tt_unfinished_threads; /* #threads still active */
2477 kmp_int32 hot_team_nth; // number of threads allocated for the hot_team
2481 kmp_int32 nteams; // number of teams in a league
2482 kmp_int32 nth; // number of threads in each team of the league
2500 kmp_int32 cg_thread_limit;
2501 kmp_int32 cg_nthreads; // Count of active threads in CG rooted at cg_root
2614 KMP_ALIGN_CACHE volatile kmp_int32
2773 std::atomic<kmp_int32> t_cancel_request;
2838 kmp_int32 data;
2981 extern kmp_int32 __kmp_use_yield;
2982 extern kmp_int32 __kmp_use_yield_exp_set;
3174 static inline void __kmp_assert_valid_gtid(kmp_int32 gtid) {
3192 extern std::atomic<kmp_int32> __kmp_team_counter;
3194 extern std::atomic<kmp_int32> __kmp_task_counter;
3319 extern void __kmpc_dispatch_init_4(ident_t *loc, kmp_int32 gtid,
3320 enum sched_type schedule, kmp_int32 lb,
3321 kmp_int32 ub, kmp_int32 st, kmp_int32 chunk);
3322 extern void __kmpc_dispatch_init_4u(ident_t *loc, kmp_int32 gtid,
3324 kmp_uint32 ub, kmp_int32 st,
3325 kmp_int32 chunk);
3326 extern void __kmpc_dispatch_init_8(ident_t *loc, kmp_int32 gtid,
3329 extern void __kmpc_dispatch_init_8u(ident_t *loc, kmp_int32 gtid,
3334 extern int __kmpc_dispatch_next_4(ident_t *loc, kmp_int32 gtid,
3335 kmp_int32 *p_last, kmp_int32 *p_lb,
3336 kmp_int32 *p_ub, kmp_int32 *p_st);
3337 extern int __kmpc_dispatch_next_4u(ident_t *loc, kmp_int32 gtid,
3338 kmp_int32 *p_last, kmp_uint32 *p_lb,
3339 kmp_uint32 *p_ub, kmp_int32 *p_st);
3340 extern int __kmpc_dispatch_next_8(ident_t *loc, kmp_int32 gtid,
3341 kmp_int32 *p_last, kmp_int64 *p_lb,
3343 extern int __kmpc_dispatch_next_8u(ident_t *loc, kmp_int32 gtid,
3344 kmp_int32 *p_last, kmp_uint64 *p_lb,
3347 extern void __kmpc_dispatch_fini_4(ident_t *loc, kmp_int32 gtid);
3348 extern void __kmpc_dispatch_fini_8(ident_t *loc, kmp_int32 gtid);
3349 extern void __kmpc_dispatch_fini_4u(ident_t *loc, kmp_int32 gtid);
3350 extern void __kmpc_dispatch_fini_8u(ident_t *loc, kmp_int32 gtid);
3354 extern void __kmp_aux_dispatch_init_4(ident_t *loc, kmp_int32 gtid,
3355 enum sched_type schedule, kmp_int32 lb,
3356 kmp_int32 ub, kmp_int32 st,
3357 kmp_int32 chunk, int push_ws);
3358 extern void __kmp_aux_dispatch_init_4u(ident_t *loc, kmp_int32 gtid,
3360 kmp_uint32 ub, kmp_int32 st,
3361 kmp_int32 chunk, int push_ws);
3362 extern void __kmp_aux_dispatch_init_8(ident_t *loc, kmp_int32 gtid,
3366 extern void __kmp_aux_dispatch_init_8u(ident_t *loc, kmp_int32 gtid,
3370 extern void __kmp_aux_dispatch_fini_chunk_4(ident_t *loc, kmp_int32 gtid);
3371 extern void __kmp_aux_dispatch_fini_chunk_8(ident_t *loc, kmp_int32 gtid);
3372 extern void __kmp_aux_dispatch_fini_chunk_4u(ident_t *loc, kmp_int32 gtid);
3373 extern void __kmp_aux_dispatch_fini_chunk_8u(ident_t *loc, kmp_int32 gtid);
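
The __kmpc_dispatch_init_4 / __kmpc_dispatch_next_4 / __kmpc_dispatch_fini_* entry points listed above form the 32-bit dynamic-dispatch API: the loop is initialized once, then each thread repeatedly asks the runtime for its next chunk of iterations. A minimal sketch of that call pattern follows; it assumes this internal kmp.h header is includable, that kmp_sch_dynamic_chunked is one of the sched_type enumerators (line 349), and that the ident_t initializer mirrors what compiler-generated code normally emits.

  // Sketch only: each thread of an active parallel region would run this.
  #include "kmp.h"

  static ident_t loc = {0, KMP_IDENT_KMPC, 0, 0, ";sketch;;0;0;;"};

  void scale_dynamic(int *a, int n) {
    kmp_int32 gtid = __kmpc_global_thread_num(&loc);

    // Distribute iterations 0..n-1 dynamically in chunks of 8.
    __kmpc_dispatch_init_4(&loc, gtid, kmp_sch_dynamic_chunked,
                           /*lb=*/0, /*ub=*/n - 1, /*st=*/1, /*chunk=*/8);

    kmp_int32 last, lb, ub, st;
    // Each successful __kmpc_dispatch_next_4 hands this thread one inclusive
    // chunk [lb, ub]; it returns 0 once no iterations remain.
    while (__kmpc_dispatch_next_4(&loc, gtid, &last, &lb, &ub, &st)) {
      for (kmp_int32 i = lb; i <= ub; i += st)
        a[i] *= 2;
    }
    // __kmpc_dispatch_fini_4 appears to be needed only for ordered loops,
    // so it is omitted in this sketch.
  }

The unsigned and 64-bit variants (_4u, _8, _8u) follow the same pattern with the matching kmp_uint32 / kmp_int64 / kmp_uint64 bound types.
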
3556 enum fork_context_e fork_context, kmp_int32 argc,
3568 extern void __kmp_serialized_parallel(ident_t *id, kmp_int32 gtid);
3601 extern kmp_task_t *__kmp_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
3656 KMP_EXPORT void __kmpc_begin(ident_t *, kmp_int32 flags);
3667 KMP_EXPORT void *__kmpc_threadprivate(ident_t *, kmp_int32 global_tid,
3670 KMP_EXPORT kmp_int32 __kmpc_global_thread_num(ident_t *);
3671 KMP_EXPORT kmp_int32 __kmpc_global_num_threads(ident_t *);
3672 KMP_EXPORT kmp_int32 __kmpc_bound_thread_num(ident_t *);
3673 KMP_EXPORT kmp_int32 __kmpc_bound_num_threads(ident_t *);
3675 KMP_EXPORT kmp_int32 __kmpc_ok_to_fork(ident_t *);
3676 KMP_EXPORT void __kmpc_fork_call(ident_t *, kmp_int32 nargs,
3679 KMP_EXPORT void __kmpc_serialized_parallel(ident_t *, kmp_int32 global_tid);
3680 KMP_EXPORT void __kmpc_end_serialized_parallel(ident_t *, kmp_int32 global_tid);
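
__kmpc_fork_call is the entry point that starts a parallel region: nargs shared arguments follow an outlined "microtask" shaped like the kmpc_micro typedef at line 1488, and the runtime invokes that microtask once per team thread, forwarding the shared arguments. A hedged sketch, assuming the trailing parameters of __kmpc_fork_call are (kmpc_micro, ...) since the match above is cut off after nargs:

  // Sketch: roughly what "#pragma omp parallel" lowers to.
  #include <cstdio>
  #include "kmp.h"

  static ident_t loc = {0, KMP_IDENT_KMPC, 0, 0, ";sketch;;0;0;;"};

  // Outlined body, shaped like kmpc_micro:
  //   void (*)(kmp_int32 *global_tid, kmp_int32 *bound_tid, ...)
  // with one captured variable passed through the varargs tail.
  static void microtask(kmp_int32 *global_tid, kmp_int32 *bound_tid, int *shared) {
    std::printf("hello from gtid %d (bound tid %d), shared=%d\n",
                *global_tid, *bound_tid, *shared);
  }

  int main() {
    int shared = 42;
    // One shared argument (&shared) is forwarded to every microtask invocation;
    // the function-pointer cast mirrors compiler-generated code.
    __kmpc_fork_call(&loc, /*nargs=*/1,
                     reinterpret_cast<kmpc_micro>(microtask), &shared);
    return 0;
  }
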
3683 KMP_EXPORT void __kmpc_barrier(ident_t *, kmp_int32 global_tid);
3684 KMP_EXPORT kmp_int32 __kmpc_master(ident_t *, kmp_int32 global_tid);
3685 KMP_EXPORT void __kmpc_end_master(ident_t *, kmp_int32 global_tid);
3686 KMP_EXPORT void __kmpc_ordered(ident_t *, kmp_int32 global_tid);
3687 KMP_EXPORT void __kmpc_end_ordered(ident_t *, kmp_int32 global_tid);
3688 KMP_EXPORT void __kmpc_critical(ident_t *, kmp_int32 global_tid,
3690 KMP_EXPORT void __kmpc_end_critical(ident_t *, kmp_int32 global_tid,
3692 KMP_EXPORT void __kmpc_critical_with_hint(ident_t *, kmp_int32 global_tid,
3695 KMP_EXPORT kmp_int32 __kmpc_barrier_master(ident_t *, kmp_int32 global_tid);
3696 KMP_EXPORT void __kmpc_end_barrier_master(ident_t *, kmp_int32 global_tid);
3698 KMP_EXPORT kmp_int32 __kmpc_barrier_master_nowait(ident_t *,
3699 kmp_int32 global_tid);
3701 KMP_EXPORT kmp_int32 __kmpc_single(ident_t *, kmp_int32 global_tid);
3702 KMP_EXPORT void __kmpc_end_single(ident_t *, kmp_int32 global_tid);
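
The entries above cover the structured synchronization constructs: __kmpc_master returns nonzero only in the thread that should execute the block, __kmpc_critical / __kmpc_end_critical bracket a critical section keyed by a kmp_critical_name (line 1477), and __kmpc_barrier is an explicit team barrier. A sketch of the usual pairing, run by every thread of a parallel region; the kmp_critical_name* third parameter of __kmpc_critical is an assumption, since the match above is truncated after global_tid:

  #include "kmp.h"

  static ident_t loc = {0, KMP_IDENT_KMPC, 0, 0, ";sketch;;0;0;;"};
  static kmp_critical_name crit_tag = {}; // zero-initialized kmp_int32[8]

  void region_body(int *shared_counter) {
    kmp_int32 gtid = __kmpc_global_thread_num(&loc);

    // "#pragma omp master": only the thread for which __kmpc_master returns
    // nonzero runs the block, and it must pair the call with __kmpc_end_master.
    if (__kmpc_master(&loc, gtid)) {
      *shared_counter = 0;
      __kmpc_end_master(&loc, gtid);
    }
    __kmpc_barrier(&loc, gtid); // make the initialization visible to the team

    // "#pragma omp critical(tag)": serialized on the shared crit_tag.
    __kmpc_critical(&loc, gtid, &crit_tag);
    ++*shared_counter;
    __kmpc_end_critical(&loc, gtid, &crit_tag);
  }
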
3704 KMP_EXPORT void KMPC_FOR_STATIC_INIT(ident_t *loc, kmp_int32 global_tid,
3705 kmp_int32 schedtype, kmp_int32 *plastiter,
3710 KMP_EXPORT void __kmpc_for_static_fini(ident_t *loc, kmp_int32 global_tid);
3712 KMP_EXPORT void __kmpc_copyprivate(ident_t *loc, kmp_int32 global_tid,
3715 kmp_int32 didit);
3722 KMP_EXPORT kmp_int32 __kmpc_omp_task(ident_t *loc_ref, kmp_int32 gtid,
3724 KMP_EXPORT kmp_task_t *__kmpc_omp_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
3725 kmp_int32 flags,
3729 KMP_EXPORT kmp_task_t *__kmpc_omp_target_task_alloc(ident_t *loc_ref, kmp_int32 gtid,
3730 kmp_int32 flags,
3735 KMP_EXPORT void __kmpc_omp_task_begin_if0(ident_t *loc_ref, kmp_int32 gtid,
3737 KMP_EXPORT void __kmpc_omp_task_complete_if0(ident_t *loc_ref, kmp_int32 gtid,
3739 KMP_EXPORT kmp_int32 __kmpc_omp_task_parts(ident_t *loc_ref, kmp_int32 gtid,
3741 KMP_EXPORT kmp_int32 __kmpc_omp_taskwait(ident_t *loc_ref, kmp_int32 gtid);
3743 KMP_EXPORT kmp_int32 __kmpc_omp_taskyield(ident_t *loc_ref, kmp_int32 gtid,
3747 void __kmpc_omp_task_begin(ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *task);
3748 void __kmpc_omp_task_complete(ident_t *loc_ref, kmp_int32 gtid,
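
__kmpc_omp_task_alloc, __kmpc_omp_task and __kmpc_omp_taskwait are the explicit-task entry points; the task body has the kmp_routine_entry_t shape from line 2177. In the sketch below, the parameters of __kmpc_omp_task_alloc beyond flags (the two sizes and the entry routine) and the reading of flags = 1 as "tied task" are assumptions, since the matches above are truncated:

  #include <cstdio>
  #include "kmp.h"

  static ident_t loc = {0, KMP_IDENT_KMPC, 0, 0, ";sketch;;0;0;;"};

  // Task entry routine, shaped like kmp_routine_entry_t:
  //   kmp_int32 (*)(kmp_int32 gtid, void *task)
  static kmp_int32 task_entry(kmp_int32 gtid, void *task) {
    std::printf("task ran on gtid %d (descriptor %p)\n", gtid, task);
    return 0;
  }

  void spawn_one_task() {
    kmp_int32 gtid = __kmpc_global_thread_num(&loc);

    kmp_task_t *task = __kmpc_omp_task_alloc(&loc, gtid, /*flags=*/1,
                                             /*sizeof_kmp_task_t=*/sizeof(kmp_task_t),
                                             /*sizeof_shareds=*/0, task_entry);
    __kmpc_omp_task(&loc, gtid, task); // enqueue (or immediately run) the task
    __kmpc_omp_taskwait(&loc, gtid);   // wait for children of the current task
  }
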
3757 KMP_EXPORT kmp_int32 __kmpc_omp_task_with_deps(
3758 ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *new_task, kmp_int32 ndeps,
3759 kmp_depend_info_t *dep_list, kmp_int32 ndeps_noalias,
3761 KMP_EXPORT void __kmpc_omp_wait_deps(ident_t *loc_ref, kmp_int32 gtid,
3762 kmp_int32 ndeps,
3764 kmp_int32 ndeps_noalias,
3766 extern kmp_int32 __kmp_omp_task(kmp_int32 gtid, kmp_task_t *new_task,
3769 KMP_EXPORT kmp_int32 __kmpc_cancel(ident_t *loc_ref, kmp_int32 gtid,
3770 kmp_int32 cncl_kind);
3771 KMP_EXPORT kmp_int32 __kmpc_cancellationpoint(ident_t *loc_ref, kmp_int32 gtid,
3772 kmp_int32 cncl_kind);
3773 KMP_EXPORT kmp_int32 __kmpc_cancel_barrier(ident_t *loc_ref, kmp_int32 gtid);
3776 KMP_EXPORT void __kmpc_proxy_task_completed(kmp_int32 gtid, kmp_task_t *ptask);
3778 KMP_EXPORT void __kmpc_taskloop(ident_t *loc, kmp_int32 gtid, kmp_task_t *task,
3779 kmp_int32 if_val, kmp_uint64 *lb,
3780 kmp_uint64 *ub, kmp_int64 st, kmp_int32 nogroup,
3781 kmp_int32 sched, kmp_uint64 grainsize,
3793 KMP_EXPORT kmp_int32 __kmpc_omp_reg_task_with_affinity(
3794 ident_t *loc_ref, kmp_int32 gtid, kmp_task_t *new_task, kmp_int32 naffins,
3798 KMP_EXPORT void __kmpc_init_lock(ident_t *loc, kmp_int32 gtid,
3800 KMP_EXPORT void __kmpc_init_nest_lock(ident_t *loc, kmp_int32 gtid,
3802 KMP_EXPORT void __kmpc_destroy_lock(ident_t *loc, kmp_int32 gtid,
3804 KMP_EXPORT void __kmpc_destroy_nest_lock(ident_t *loc, kmp_int32 gtid,
3806 KMP_EXPORT void __kmpc_set_lock(ident_t *loc, kmp_int32 gtid, void **user_lock);
3807 KMP_EXPORT void __kmpc_set_nest_lock(ident_t *loc, kmp_int32 gtid,
3809 KMP_EXPORT void __kmpc_unset_lock(ident_t *loc, kmp_int32 gtid,
3811 KMP_EXPORT void __kmpc_unset_nest_lock(ident_t *loc, kmp_int32 gtid,
3813 KMP_EXPORT int __kmpc_test_lock(ident_t *loc, kmp_int32 gtid, void **user_lock);
3814 KMP_EXPORT int __kmpc_test_nest_lock(ident_t *loc, kmp_int32 gtid,
3817 KMP_EXPORT void __kmpc_init_lock_with_hint(ident_t *loc, kmp_int32 gtid,
3819 KMP_EXPORT void __kmpc_init_nest_lock_with_hint(ident_t *loc, kmp_int32 gtid,
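
The lock entry points above all take the lock as a caller-owned void ** slot; the runtime stores its own representation behind it (this mirrors how omp_lock_t is handled, though the exact contract is an assumption here). A sketch of the init / set / unset / test / destroy sequence from a single thread:

  #include "kmp.h"

  static ident_t loc = {0, KMP_IDENT_KMPC, 0, 0, ";sketch;;0;0;;"};
  static void *lock_storage = nullptr; // opaque slot the runtime fills in

  void locks_demo(int *value) {
    kmp_int32 gtid = __kmpc_global_thread_num(&loc);

    __kmpc_init_lock(&loc, gtid, &lock_storage);    // once, before first use

    __kmpc_set_lock(&loc, gtid, &lock_storage);     // blocking acquire
    ++*value;                                       // protected update
    __kmpc_unset_lock(&loc, gtid, &lock_storage);   // release

    if (__kmpc_test_lock(&loc, gtid, &lock_storage)) { // non-blocking acquire
      __kmpc_unset_lock(&loc, gtid, &lock_storage);
    }

    __kmpc_destroy_lock(&loc, gtid, &lock_storage); // once, after last use
  }

The nest-lock variants follow the same shape but allow re-acquisition by the owning thread.
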
3825 KMP_EXPORT kmp_int32 __kmpc_reduce_nowait(
3826 ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
3829 KMP_EXPORT void __kmpc_end_reduce_nowait(ident_t *loc, kmp_int32 global_tid,
3831 KMP_EXPORT kmp_int32 __kmpc_reduce(
3832 ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
3835 KMP_EXPORT void __kmpc_end_reduce(ident_t *loc, kmp_int32 global_tid,
3841 ident_t *loc, kmp_int32 global_tid, kmp_int32 num_vars, size_t reduce_size,
3846 KMP_EXPORT kmp_int32 __kmp_get_reduce_method(void);
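
__kmpc_reduce_nowait / __kmpc_end_reduce_nowait back the reduction clause. The parameters past reduce_size (the data pointer, combiner function and lock) and the return-code convention are not visible in the matches above; the sketch below assumes them from typical compiler usage, so treat it as illustrative:

  #include "kmp.h"

  static ident_t loc = {0, KMP_IDENT_KMPC, 0, 0, ";sketch;;0;0;;"};
  static kmp_critical_name reduce_tag = {};

  // Combiner: fold the right-hand partial result into the left-hand one.
  static void reduce_func(void *lhs, void *rhs) {
    *static_cast<int *>(lhs) += *static_cast<int *>(rhs);
  }

  // Run by every thread of the region with its private partial sum.
  void reduce_into(int *global_sum, int local_sum) {
    kmp_int32 gtid = __kmpc_global_thread_num(&loc);

    kmp_int32 how = __kmpc_reduce_nowait(&loc, gtid, /*num_vars=*/1,
                                         /*reduce_size=*/sizeof(int), &local_sum,
                                         reduce_func, &reduce_tag);
    if (how == 1) {
      // This thread combines into the shared result, then closes the construct.
      *global_sum += local_sum;
      __kmpc_end_reduce_nowait(&loc, gtid, &reduce_tag);
    } else if (how == 2) {
      // Atomic path: every thread folds its value in atomically (GCC/Clang builtin).
      __atomic_fetch_add(global_sum, local_sum, __ATOMIC_RELAXED);
    }
    // how == 0: nothing left for this thread to do.
  }
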
3854 KMP_EXPORT kmp_int32 __kmpc_in_parallel(ident_t *loc);
3855 KMP_EXPORT void __kmpc_pop_num_threads(ident_t *loc, kmp_int32 global_tid);
3856 KMP_EXPORT void __kmpc_push_num_threads(ident_t *loc, kmp_int32 global_tid,
3857 kmp_int32 num_threads);
3859 KMP_EXPORT void __kmpc_push_proc_bind(ident_t *loc, kmp_int32 global_tid,
3861 KMP_EXPORT void __kmpc_push_num_teams(ident_t *loc, kmp_int32 global_tid,
3862 kmp_int32 num_teams,
3863 kmp_int32 num_threads);
3864 KMP_EXPORT void __kmpc_fork_teams(ident_t *loc, kmp_int32 argc,
3871 KMP_EXPORT void __kmpc_doacross_init(ident_t *loc, kmp_int32 gtid,
3872 kmp_int32 num_dims,
3874 KMP_EXPORT void __kmpc_doacross_wait(ident_t *loc, kmp_int32 gtid,
3876 KMP_EXPORT void __kmpc_doacross_post(ident_t *loc, kmp_int32 gtid,
3878 KMP_EXPORT void __kmpc_doacross_fini(ident_t *loc, kmp_int32 gtid);
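
The doacross entry points back the ordered(n) depend(source)/depend(sink) clauses: each thread registers the dimension bounds once, then waits on the iterations it depends on and posts its own. Everything past the leading kmp_int32 parameters (the kmp_dim bounds struct and the kmp_int64 iteration vectors) is an assumption in the sketch below, since those parts are not shown in the matches:

  #include "kmp.h"

  static ident_t loc = {0, KMP_IDENT_KMPC, 0, 0, ";sketch;;0;0;;"};

  // Each thread runs this for its share [my_lo, my_up] of iterations 1..n-1,
  // where iteration i depends on iteration i-1.
  void doacross_chain(int *a, kmp_int64 my_lo, kmp_int64 my_up, kmp_int64 n) {
    kmp_int32 gtid = __kmpc_global_thread_num(&loc);

    struct kmp_dim dim = {/*lo=*/1, /*up=*/n - 1, /*st=*/1}; // assumed layout
    __kmpc_doacross_init(&loc, gtid, /*num_dims=*/1, &dim);

    for (kmp_int64 i = my_lo; i <= my_up; ++i) {
      if (i > 1) {
        kmp_int64 sink = i - 1;
        __kmpc_doacross_wait(&loc, gtid, &sink); // depend(sink: i-1)
      }
      a[i] += a[i - 1];
      __kmpc_doacross_post(&loc, gtid, &i);      // depend(source)
    }
    __kmpc_doacross_fini(&loc, gtid);
  }
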
3880 KMP_EXPORT void *__kmpc_threadprivate_cached(ident_t *loc, kmp_int32 global_tid,
3999 int __kmp_execute_tasks_32(kmp_info_t *thread, kmp_int32 gtid,
4005 kmp_int32 is_constrained);
4007 int __kmp_execute_tasks_64(kmp_info_t *thread, kmp_int32 gtid,
4013 kmp_int32 is_constrained);
4014 int __kmp_execute_tasks_oncore(kmp_info_t *thread, kmp_int32 gtid,
4020 kmp_int32 is_constrained);