Lines Matching refs:kmp_uint32
1164 kmp_uint32 eax;
1165 kmp_uint32 ebx;
1166 kmp_uint32 ecx;
1167 kmp_uint32 edx;
1227 static inline void __kmp_load_mxcsr(const kmp_uint32 *p) { _mm_setcsr(*p); } in __kmp_load_mxcsr()
1228 static inline void __kmp_store_mxcsr(kmp_uint32 *p) { *p = _mm_getcsr(); } in __kmp_store_mxcsr()
1230 static inline void __kmp_load_mxcsr(const kmp_uint32 *p) {} in __kmp_load_mxcsr()
1231 static inline void __kmp_store_mxcsr(kmp_uint32 *p) { *p = 0; } in __kmp_store_mxcsr()
1239 static inline void __kmp_load_mxcsr(const kmp_uint32 *p) { _mm_setcsr(*p); } in __kmp_load_mxcsr()
1240 static inline void __kmp_store_mxcsr(kmp_uint32 *p) { *p = _mm_getcsr(); } in __kmp_store_mxcsr()
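
The three variants above (lines 1227-1240) are thin wrappers over the SSE control/status register intrinsics _mm_getcsr / _mm_setcsr from <xmmintrin.h>, with a no-op fallback for targets that have no MXCSR. A minimal stand-alone sketch of the save/restore pattern such helpers support; the wrapper names and the driver function below are illustrative, not taken from kmp.h:

    #include <xmmintrin.h>
    #include <cstdint>

    static inline void load_mxcsr(const std::uint32_t *p) { _mm_setcsr(*p); }
    static inline void store_mxcsr(std::uint32_t *p) { *p = _mm_getcsr(); }

    // Run a callback under the power-on default FP environment (0x1f80:
    // all SSE exceptions masked, round-to-nearest), restoring the caller's
    // MXCSR afterwards.
    static void run_with_default_fp_env(void (*body)(void)) {
      std::uint32_t saved;
      store_mxcsr(&saved); // capture the caller's control/status bits
      _mm_setcsr(0x1f80);
      body();
      load_mxcsr(&saved);  // put the caller's settings back
    }
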
1658 kmp_uint32 ordered_lower;
1659 kmp_uint32 ordered_upper;
1716 kmp_uint32 ordered_lower;
1717 kmp_uint32 ordered_upper;
1768 volatile kmp_uint32 iteration;
1769 volatile kmp_uint32 num_done;
1770 volatile kmp_uint32 ordered_iteration;
1790 volatile kmp_uint32 buffer_index;
1792 volatile kmp_uint32 *doacross_flags; // shared array of iteration flags (0/1)
1817 volatile kmp_uint32 *th_doacross_flags; // pointer to shared array of flags
1928 kmp_uint32 *skip_per_level;
1929 kmp_uint32 my_level;
1932 kmp_uint32 depth;
1936 kmp_uint32 nproc;
2248 kmp_uint32 id;
2274 kmp_uint32 nelements;
2275 kmp_uint32 nconflicts;
2363 kmp_uint32 td_taskwait_counter;
2407 kmp_uint32 td_deque_head; // Head of deque (will wrap)
2408 kmp_uint32 td_deque_tail; // Tail of deque (will wrap)
2451 volatile kmp_uint32
2581 volatile kmp_uint32 th_spin_here; /* thread-local location for spinning */
2596 kmp_uint32 th_task_state_top; // Top element of th_task_state_memo_stack
2597 kmp_uint32 th_task_state_stack_sz; // Size of th_task_state_memo_stack
2598 kmp_uint32 th_reap_state; // Non-zero indicates thread is not
2655 typedef struct kmp_base_data { volatile kmp_uint32 t_value; } kmp_base_data_t;
2682 sizeof(kmp_int16) + sizeof(kmp_uint32)) % \
2735 kmp_uint32 t_mxcsr;
2777 std::atomic<kmp_uint32> t_copyin_counter;
2922 extern kmp_uint32 __kmp_barrier_gather_bb_dflt;
2923 extern kmp_uint32 __kmp_barrier_release_bb_dflt;
2926 extern kmp_uint32 __kmp_barrier_gather_branch_bits[bs_last_barrier];
2927 extern kmp_uint32 __kmp_barrier_release_branch_bits[bs_last_barrier];
2983 extern kmp_uint32 __kmp_yield_init;
2984 extern kmp_uint32 __kmp_yield_next;
3048 extern kmp_uint32 __kmp_init_mxcsr; /* init thread's mxcsr */
3091 kmp_uint32 max_soft_retries;
3094 kmp_uint32 max_badness;
3323 enum sched_type schedule, kmp_uint32 lb,
3324 kmp_uint32 ub, kmp_int32 st,
3338 kmp_int32 *p_last, kmp_uint32 *p_lb,
3339 kmp_uint32 *p_ub, kmp_int32 *p_st);
3359 enum sched_type schedule, kmp_uint32 lb,
3360 kmp_uint32 ub, kmp_int32 st,
3377 extern kmp_uint32 __kmp_eq_4(kmp_uint32 value, kmp_uint32 checker);
3378 extern kmp_uint32 __kmp_neq_4(kmp_uint32 value, kmp_uint32 checker);
3379 extern kmp_uint32 __kmp_lt_4(kmp_uint32 value, kmp_uint32 checker);
3380 extern kmp_uint32 __kmp_ge_4(kmp_uint32 value, kmp_uint32 checker);
3381 extern kmp_uint32 __kmp_le_4(kmp_uint32 value, kmp_uint32 checker);
3382 extern kmp_uint32 __kmp_wait_4(kmp_uint32 volatile *spinner, kmp_uint32 checker,
3383 kmp_uint32 (*pred)(kmp_uint32, kmp_uint32),
3385 extern void __kmp_wait_4_ptr(void *spinner, kmp_uint32 checker,
3386 kmp_uint32 (*pred)(void *, kmp_uint32), void *obj);
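
The declarations at 3377-3386 spell out a predicate-driven spin-wait: __kmp_wait_4 repeatedly reads a 32-bit location and returns once the supplied comparison (__kmp_eq_4, __kmp_neq_4, __kmp_lt_4, __kmp_ge_4, or __kmp_le_4) holds against checker. A self-contained sketch of that pattern, using hypothetical names (wait_4, ge_4) rather than the runtime's own symbols and omitting its yielding/back-off logic:

    #include <cstdint>

    typedef std::uint32_t u32;

    static u32 ge_4(u32 value, u32 checker) { return value >= checker; }

    // Spin until pred(*spinner, checker) is non-zero; return the last value read.
    static u32 wait_4(volatile u32 *spinner, u32 checker,
                      u32 (*pred)(u32, u32)) {
      u32 v;
      while (!pred(v = *spinner, checker)) {
        // a real runtime yields or backs off here instead of busy-spinning
      }
      return v;
    }

    // Usage: block until another thread has advanced `iteration` to `target`.
    //   volatile u32 iteration = 0;
    //   ...
    //   wait_4(&iteration, target, ge_4);
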
3454 extern void __kmp_get_hierarchy(kmp_uint32 nproc, kmp_bstate_t *thr_bar);