Home
last modified time | relevance | path

Searched refs: kmp_uint32 (Results 1 – 25 of 26) sorted by relevance

12

/external/llvm-project/openmp/runtime/src/
Dz_Windows_NT-586_util.cpp47 kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 d) { in __kmp_test_then_or32()
48 kmp_uint32 old_value, new_value; in __kmp_test_then_or32()
62 kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 d) { in __kmp_test_then_and32()
63 kmp_uint32 old_value, new_value; in __kmp_test_then_and32()
Dkmp_os.h122 typedef unsigned int kmp_uint32; typedef
158 typedef unsigned int kmp_uint32; typedef
199 typedef kmp_uint32 kmp_uint;
471 extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
472 extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
634 __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
636 __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
664 __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
665 (kmp_uint32)(sv))
667 __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
[all …]
Dkmp.h1164 kmp_uint32 eax;
1165 kmp_uint32 ebx;
1166 kmp_uint32 ecx;
1167 kmp_uint32 edx;
1227 static inline void __kmp_load_mxcsr(const kmp_uint32 *p) { _mm_setcsr(*p); } in __kmp_load_mxcsr()
1228 static inline void __kmp_store_mxcsr(kmp_uint32 *p) { *p = _mm_getcsr(); } in __kmp_store_mxcsr()
1230 static inline void __kmp_load_mxcsr(const kmp_uint32 *p) {} in __kmp_load_mxcsr()
1231 static inline void __kmp_store_mxcsr(kmp_uint32 *p) { *p = 0; } in __kmp_store_mxcsr()
1239 static inline void __kmp_load_mxcsr(const kmp_uint32 *p) { _mm_setcsr(*p); } in __kmp_load_mxcsr()
1240 static inline void __kmp_store_mxcsr(kmp_uint32 *p) { *p = _mm_getcsr(); } in __kmp_store_mxcsr()
[all …]
Dkmp_affinity.h650 static const kmp_uint32 maxLeaves = 4;
651 static const kmp_uint32 minBranch = 4;
657 kmp_uint32 maxLevels;
663 kmp_uint32 depth;
664 kmp_uint32 base_num_threads;
674 kmp_uint32 *numPerLevel;
675 kmp_uint32 *skipPerLevel;
721 (kmp_uint32 *)__kmp_allocate(maxLevels * 2 * sizeof(kmp_uint32)); in init()
723 for (kmp_uint32 i = 0; i < maxLevels; in init()
747 kmp_uint32 branch = minBranch; in init()
[all …]
Dkmp_wait_release.h179 kmp_uint32 spins; in __kmp_wait_template()
187 kmp_uint32 hibernate; in __kmp_wait_template()
610 template <> struct flag_traits<kmp_uint32> {
611 typedef kmp_uint32 flag_t;
648 kmp_uint32
664 kmp_info_t *get_waiter(kmp_uint32 i) {
671 kmp_uint32 get_num_waiters() { return num_waiting_threads; }
752 kmp_uint32
768 kmp_info_t *get_waiter(kmp_uint32 i) {
775 kmp_uint32 get_num_waiters() { return num_waiting_threads; }
[all …]
Dkmp_dispatch.cpp725 __kmp_dispatch_init_hier_runtime<kmp_uint32>(ident_t *loc, kmp_uint32 lb, in __kmp_dispatch_init_hier_runtime()
726 kmp_uint32 ub, kmp_int32 st) { in __kmp_dispatch_init_hier_runtime()
727 __kmp_dispatch_init_hierarchy<kmp_uint32>( in __kmp_dispatch_init_hier_runtime()
776 kmp_uint32 my_buffer_index; in __kmp_dispatch_init()
902 __kmp_wait<kmp_uint32>(&sh->buffer_index, my_buffer_index, in __kmp_dispatch_init()
903 __kmp_eq<kmp_uint32> USE_ITT_BUILD_ARG(NULL)); in __kmp_dispatch_init()
2168 kmp_uint32 team_id; in __kmp_dist_get_bounds()
2169 kmp_uint32 nteams; in __kmp_dist_get_bounds()
2214 KMP_DEBUG_ASSERT(nteams == (kmp_uint32)team->t.t_parent->t.t_nproc); in __kmp_dist_get_bounds()
2314 enum sched_type schedule, kmp_uint32 lb, in __kmpc_dispatch_init_4u()
[all …]
Dkmp_lock.h68 typedef kmp_uint32 kmp_lock_flags_t;
73 typedef kmp_uint32 kmp_lock_index_t;
320 kmp_uint32 successfulSpeculations;
321 kmp_uint32 hardFailedSpeculations;
322 kmp_uint32 softFailedSpeculations;
323 kmp_uint32 nonSpeculativeAcquires;
324 kmp_uint32 nonSpeculativeAcquireAttempts;
325 kmp_uint32 lemmingYields;
340 kmp_uint32 volatile badness;
341 kmp_uint32 volatile acquire_attempts;
[all …]
Dkmp_sched.cpp90 kmp_uint32 tid; in __kmp_for_static_init()
91 kmp_uint32 nth; in __kmp_for_static_init()
439 kmp_uint32 tid; in __kmp_dist_for_static_init()
440 kmp_uint32 nth; in __kmp_dist_for_static_init()
441 kmp_uint32 team_id; in __kmp_dist_for_static_init()
442 kmp_uint32 nteams; in __kmp_dist_for_static_init()
491 KMP_DEBUG_ASSERT(nteams == (kmp_uint32)team->t.t_parent->t.t_nproc); in __kmp_dist_for_static_init()
682 kmp_uint32 team_id; in __kmp_team_static_init()
683 kmp_uint32 nteams; in __kmp_team_static_init()
733 KMP_DEBUG_ASSERT(nteams == (kmp_uint32)team->t.t_parent->t.t_nproc); in __kmp_team_static_init()
[all …]
Dkmp_global.cpp76 kmp_uint32 __kmp_barrier_gather_bb_dflt = 2;
78 kmp_uint32 __kmp_barrier_release_bb_dflt = 2;
86 kmp_uint32 __kmp_barrier_gather_branch_bits[bs_last_barrier] = {0};
87 kmp_uint32 __kmp_barrier_release_branch_bits[bs_last_barrier] = {0};
185 kmp_uint32 __kmp_init_mxcsr = 0;
415 kmp_uint32 __kmp_yield_init = KMP_INIT_WAIT;
416 kmp_uint32 __kmp_yield_next = KMP_NEXT_WAIT;
Dkmp_barrier.cpp162 kmp_uint32 nproc = this_thr->th.th_team_nproc; in __kmp_linear_barrier_release_template()
299 kmp_uint32 nproc = this_thr->th.th_team_nproc; in __kmp_tree_barrier_gather()
300 kmp_uint32 branch_bits = __kmp_barrier_gather_branch_bits[bt]; in __kmp_tree_barrier_gather()
301 kmp_uint32 branch_factor = 1 << branch_bits; in __kmp_tree_barrier_gather()
302 kmp_uint32 child; in __kmp_tree_barrier_gather()
303 kmp_uint32 child_tid; in __kmp_tree_barrier_gather()
409 kmp_uint32 nproc; in __kmp_tree_barrier_release()
410 kmp_uint32 branch_bits = __kmp_barrier_release_branch_bits[bt]; in __kmp_tree_barrier_release()
411 kmp_uint32 branch_factor = 1 << branch_bits; in __kmp_tree_barrier_release()
412 kmp_uint32 child; in __kmp_tree_barrier_release()
[all …]
Dkmp_dispatch.h137 kmp_uint32 ordered_bumped;
141 kmp_uint32 type_size;
173 volatile kmp_uint32 buffer_index;
175 kmp_uint32 *doacross_flags; // array of iteration flags (0/1)
262 template <typename T> kmp_uint32 __kmp_ge(T value, T checker) { in __kmp_ge()
265 template <typename T> kmp_uint32 __kmp_eq(T value, T checker) { in __kmp_eq()
290 kmp_uint32 (*pred)(UT, UT) USE_ITT_BUILD_ARG(void *obj)) { in __kmp_wait()
294 kmp_uint32 spins; in __kmp_wait()
295 kmp_uint32 (*f)(UT, UT) = pred; in __kmp_wait()
Dkmp_atomic.h501 void __kmpc_atomic_fixed4u_div(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
502 kmp_uint32 rhs);
511 void __kmpc_atomic_fixed4u_shr(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
512 kmp_uint32 rhs);
722 void __kmpc_atomic_fixed4u_div_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
723 kmp_uint32 rhs);
728 void __kmpc_atomic_fixed4u_shr_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
729 kmp_uint32 rhs);
861 void __kmpc_atomic_fixed4u_add_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
865 void __kmpc_atomic_fixed4u_sub_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
[all …]
Dkmp_error.h35 kmp_user_lock_p name, kmp_uint32);
44 kmp_user_lock_p name, kmp_uint32);
Dkmp_lock.cpp47 kmp_uint32 x, y; in __kmp_validate_locks()
50 x = ~((kmp_uint32)0) - 2; in __kmp_validate_locks()
54 kmp_uint32 z = (x - y); in __kmp_validate_locks()
85 kmp_uint32 curr = KMP_LOCK_STRIP(lck->lk.poll); in __kmp_acquire_tas_lock_timed_template()
100 kmp_uint32 spins; in __kmp_acquire_tas_lock_timed_template()
324 kmp_uint32 curr = KMP_LOCK_STRIP(TCR_4(lck->lk.poll)); in __kmp_acquire_futex_lock_timed_template()
619 static kmp_uint32 __kmp_bakery_check(void *now_serving, kmp_uint32 my_ticket) { in __kmp_bakery_check()
627 kmp_uint32 my_ticket = std::atomic_fetch_add_explicit( in __kmp_acquire_ticket_lock_timed_template()
677 kmp_uint32 my_ticket = std::atomic_load_explicit(&lck->lk.next_ticket, in __kmp_test_ticket_lock()
682 kmp_uint32 next_ticket = my_ticket + 1; in __kmp_test_ticket_lock()
[all …]
Dkmp_atomic.cpp998 ATOMIC_CMPXCHG(fixed4u, div, kmp_uint32, 32, /, 4i, 3,
1008 ATOMIC_CMPXCHG(fixed4u, shr, kmp_uint32, 32, >>, 4i, 3,
1481 ATOMIC_CMPXCHG_REV(fixed4u, div, kmp_uint32, 32, /, 4i,
1487 ATOMIC_CMPXCHG_REV(fixed4u, shr, kmp_uint32, 32, >>, 4i,
1714 ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, add, 32, +, fp, _Quad, 4i, 3,
1718 ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, sub, 32, -, fp, _Quad, 4i, 3,
1722 ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, mul, 32, *, fp, _Quad, 4i, 3,
1726 ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, div, 32, /, fp, _Quad, 4i, 3,
1795 ATOMIC_CMPXCHG_REV_MIX(fixed4u, kmp_uint32, sub_rev, 32, -, fp, _Quad, 4i, 3,
1799 ATOMIC_CMPXCHG_REV_MIX(fixed4u, kmp_uint32, div_rev, 32, /, fp, _Quad, 4i, 3,
[all …]
Dz_Linux_util.cpp364 kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 d) { in __kmp_test_then_or32()
365 kmp_uint32 old_value, new_value; in __kmp_test_then_or32()
378 kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 d) { in __kmp_test_then_and32()
379 kmp_uint32 old_value, new_value; in __kmp_test_then_and32()
1019 KMP_DEBUG_ASSERT(sizeof(kmp_uint32) == in __kmp_create_monitor()
1021 __kmp_wait_4((kmp_uint32 volatile *)&__kmp_global.g.g_time.dt.t_value, -1, in __kmp_create_monitor()
Dkmp_tasking.cpp1869 RCAST(std::atomic<kmp_uint32> *, in __kmpc_omp_taskwait_template()
2506 RCAST(std::atomic<kmp_uint32> *, &(taskgroup->count)), 0U); in __kmpc_end_taskgroup()
2603 kmp_uint32 tail; in __kmp_remove_my_task()
2765 (kmp_uint32)((target + 1) & TASK_DEQUE_MASK(victim_td->td))); in __kmp_steal_task()
3378 CCAST(kmp_uint32 *, &task_team->tt.tt_active), in __kmp_allocate_task_team()
3448 kmp_uint32 spins; in __kmp_wait_to_unref_task_teams()
3610 RCAST(std::atomic<kmp_uint32> *, in __kmp_task_team_wait()
3639 std::atomic<kmp_uint32> *spin = RCAST( in __kmp_tasking_barrier()
3640 std::atomic<kmp_uint32> *, in __kmp_tasking_barrier()
4101 kmp_uint32 *lower = RCAST(kmp_uint32 *, task->shareds); in set_lb()
[all …]
Dkmp_csupport.cpp973 kmp_uint32 spins; \
3874 kmp_uint32 *flags; in __kmpc_doacross_init()
3950 __kmp_wait_4((volatile kmp_uint32 *)&sh_buf->doacross_buf_idx, idx, in __kmpc_doacross_init()
3957 flags = (kmp_uint32 *)KMP_COMPARE_AND_STORE_RET32( in __kmpc_doacross_init()
3960 flags = (kmp_uint32 *)KMP_COMPARE_AND_STORE_RET64( in __kmpc_doacross_init()
3966 flags = (kmp_uint32 *)__kmp_thread_calloc(th, size, 1); in __kmpc_doacross_init()
3969 } else if (flags == (kmp_uint32 *)1) { in __kmpc_doacross_init()
3981 KMP_DEBUG_ASSERT(sh_buf->doacross_flags > (kmp_uint32 *)1); // check ptr value in __kmpc_doacross_init()
3991 kmp_uint32 flag; in __kmpc_doacross_wait()
4102 kmp_uint32 flag; in __kmpc_doacross_post()
[all …]
Dkmp_error.cpp244 …eck_sync( int gtid, enum cons_type ct, ident_t const * ident, kmp_user_lock_p lck, kmp_uint32 seq )
330 …ush_sync( int gtid, enum cons_type ct, ident_t const * ident, kmp_user_lock_p lck, kmp_uint32 seq )
Dkmp_utility.cpp146 kmp_uint32 t, data[4]; in __kmp_query_cpuid()
Dkmp_taskdeps.cpp790 (std::atomic<kmp_uint32> *)&node.dn.npredecessors, 0U); in __kmpc_omp_wait_deps()
Dkmp_settings.cpp1539 (kmp_uint32)__kmp_str_to_int(value, ','); in __kmp_stg_parse_barrier_branch_bit()
1545 (kmp_uint32)__kmp_str_to_int(comma + 1, 0); in __kmp_stg_parse_barrier_branch_bit()
4189 kmp_uint32 max_backoff = __kmp_spin_backoff_params.max_backoff; in __kmp_stg_parse_spin_backoff_params()
4190 kmp_uint32 min_tick = __kmp_spin_backoff_params.min_tick; in __kmp_stg_parse_spin_backoff_params()
Dkmp_runtime.cpp1081 kmp_uint32 mxcsr; in propagateFPControl()
1116 kmp_uint32 mxcsr; in updateHWFPControl()
2090 kmp_uint32 new_size = 2 * master_th->th.th_task_state_stack_sz; in __kmp_fork_call()
2092 kmp_uint32 i; in __kmp_fork_call()
5449 volatile kmp_uint32 *state = &th->th.th_reap_state; in __kmp_free_team()
6636 KMP_DEBUG_ASSERT(sizeof(kmp_uint32) == 4); in __kmp_do_serial_initialize()
Dz_Windows_NT_util.cpp1291 kmp_uint32 spins; in __kmp_reap_common()
Dkmp_itt.inl520 kmp_uint32 impl = 0;

12