Lines Matching refs:gtid

104 void __kmp_dispatch_init_algorithm(ident_t *loc, int gtid,  in __kmp_dispatch_init_algorithm()  argument
134 KD_TRACE(10, (buff, gtid, pr, lb, ub, st, schedule, chunk, nproc, tid)); in __kmp_dispatch_init_algorithm()
139 th = __kmp_threads[gtid]; in __kmp_dispatch_init_algorithm()
146 KMP_MASTER_GTID(gtid) && th->th.th_teams_microtask == NULL && in __kmp_dispatch_init_algorithm()
211 KD_TRACE(10, (buff, gtid, schedule, chunk)); in __kmp_dispatch_init_algorithm()
235 KD_TRACE(10, (buff, gtid, schedule, chunk)); in __kmp_dispatch_init_algorithm()
280 KD_TRACE(10, (buff, gtid, schedule, chunk)); in __kmp_dispatch_init_algorithm()
324 if (KMP_MASTER_GTID(gtid)) { in __kmp_dispatch_init_algorithm()
355 gtid)); in __kmp_dispatch_init_algorithm()
394 gtid)); in __kmp_dispatch_init_algorithm()
407 gtid)); in __kmp_dispatch_init_algorithm()
473 gtid)); in __kmp_dispatch_init_algorithm()
487 gtid)); in __kmp_dispatch_init_algorithm()
502 gtid)); in __kmp_dispatch_init_algorithm()
508 gtid)); in __kmp_dispatch_init_algorithm()
516 gtid)); in __kmp_dispatch_init_algorithm()
629 gtid)); in __kmp_dispatch_init_algorithm()
640 gtid)); in __kmp_dispatch_init_algorithm()
650 gtid)); in __kmp_dispatch_init_algorithm()
658 gtid)); in __kmp_dispatch_init_algorithm()
768 __kmp_dispatch_init(ident_t *loc, int gtid, enum sched_type schedule, T lb, in __kmp_dispatch_init() argument
784 __kmp_assert_valid_gtid(gtid); in __kmp_dispatch_init()
803 KD_TRACE(10, (buff, gtid, schedule, chunk, lb, ub, st)); in __kmp_dispatch_init()
808 th = __kmp_threads[gtid]; in __kmp_dispatch_init()
841 gtid)); in __kmp_dispatch_init()
857 KMP_MASTER_GTID(gtid) && th->th.th_teams_microtask == NULL && in __kmp_dispatch_init()
875 KD_TRACE(10, ("__kmp_dispatch_init: T#%d my_buffer_index:%d\n", gtid, in __kmp_dispatch_init()
879 __kmp_dispatch_init_algorithm(loc, gtid, pr, schedule, lb, ub, st, in __kmp_dispatch_init()
901 gtid, my_buffer_index, sh->buffer_index)); in __kmp_dispatch_init()
909 gtid, my_buffer_index, sh->buffer_index)); in __kmp_dispatch_init()
916 __kmp_itt_ordered_init(gtid); in __kmp_dispatch_init()
967 KD_TRACE(10, (buff, gtid, pr->schedule, pr->flags.ordered, pr->u.p.lb, in __kmp_dispatch_init()
995 &(task_info->task_data), pr->u.p.tc, OMPT_LOAD_RETURN_ADDRESS(gtid)); in __kmp_dispatch_init()
1007 static void __kmp_dispatch_finish(int gtid, ident_t *loc) { in __kmp_dispatch_finish() argument
1009 __kmp_assert_valid_gtid(gtid); in __kmp_dispatch_finish()
1010 kmp_info_t *th = __kmp_threads[gtid]; in __kmp_dispatch_finish()
1012 KD_TRACE(100, ("__kmp_dispatch_finish: T#%d called\n", gtid)); in __kmp_dispatch_finish()
1030 gtid)); in __kmp_dispatch_finish()
1042 KD_TRACE(1000, (buff, gtid, sh->u.s.ordered_iteration, lower)); in __kmp_dispatch_finish()
1057 KD_TRACE(1000, (buff, gtid, sh->u.s.ordered_iteration, lower)); in __kmp_dispatch_finish()
1065 KD_TRACE(100, ("__kmp_dispatch_finish: T#%d returned\n", gtid)); in __kmp_dispatch_finish()
1071 static void __kmp_dispatch_finish_chunk(int gtid, ident_t *loc) { in __kmp_dispatch_finish_chunk() argument
1073 __kmp_assert_valid_gtid(gtid); in __kmp_dispatch_finish_chunk()
1074 kmp_info_t *th = __kmp_threads[gtid]; in __kmp_dispatch_finish_chunk()
1076 KD_TRACE(100, ("__kmp_dispatch_finish_chunk: T#%d called\n", gtid)); in __kmp_dispatch_finish_chunk()
1099 gtid)); in __kmp_dispatch_finish_chunk()
1112 KD_TRACE(1000, (buff, gtid, sh->u.s.ordered_iteration, lower, upper)); in __kmp_dispatch_finish_chunk()
1123 gtid)); in __kmp_dispatch_finish_chunk()
1136 (buff, gtid, sh->u.s.ordered_iteration, inc, lower, upper)); in __kmp_dispatch_finish_chunk()
1145 KD_TRACE(100, ("__kmp_dispatch_finish_chunk: T#%d returned\n", gtid)); in __kmp_dispatch_finish_chunk()
1151 int __kmp_dispatch_next_algorithm(int gtid, in __kmp_dispatch_next_algorithm() argument
1165 kmp_info_t *th = __kmp_threads[gtid]; in __kmp_dispatch_next_algorithm()
1181 KD_TRACE(10, (buff, gtid, pr, sh, nproc, tid)); in __kmp_dispatch_next_algorithm()
1191 gtid, status)); in __kmp_dispatch_next_algorithm()
1202 gtid)); in __kmp_dispatch_next_algorithm()
1212 __kmp_acquire_lock(lck, gtid); in __kmp_dispatch_next_algorithm()
1216 __kmp_release_lock(lck, gtid); in __kmp_dispatch_next_algorithm()
1257 __kmp_acquire_lock(lck, gtid); in __kmp_dispatch_next_algorithm()
1261 __kmp_release_lock(lck, gtid); in __kmp_dispatch_next_algorithm()
1276 __kmp_release_lock(lck, gtid); in __kmp_dispatch_next_algorithm()
1283 __kmp_acquire_lock(pr->u.p.th_steal_lock, gtid); in __kmp_dispatch_next_algorithm()
1286 __kmp_release_lock(pr->u.p.th_steal_lock, gtid); in __kmp_dispatch_next_algorithm()
1430 gtid)); in __kmp_dispatch_next_algorithm()
1451 gtid)); in __kmp_dispatch_next_algorithm()
1492 gtid)); in __kmp_dispatch_next_algorithm()
1533 gtid)); in __kmp_dispatch_next_algorithm()
1602 gtid)); in __kmp_dispatch_next_algorithm()
1680 gtid)); in __kmp_dispatch_next_algorithm()
1771 gtid)); in __kmp_dispatch_next_algorithm()
1826 KD_TRACE(1000, (buff, gtid, pr->u.p.ordered_lower, pr->u.p.ordered_upper)); in __kmp_dispatch_next_algorithm()
1836 KD_TRACE(10, (buff, gtid, status, *p_last, *p_lb, *p_ub, *p_st)); in __kmp_dispatch_next_algorithm()
1895 static int __kmp_dispatch_next(ident_t *loc, int gtid, kmp_int32 *p_last, in __kmp_dispatch_next() argument
1914 __kmp_assert_valid_gtid(gtid); in __kmp_dispatch_next()
1915 kmp_info_t *th = __kmp_threads[gtid]; in __kmp_dispatch_next()
1922 gtid, p_lb, p_ub, p_st, p_last)); in __kmp_dispatch_next()
1939 pr->pushed_ws = __kmp_pop_workshare(gtid, pr->pushed_ws, loc); in __kmp_dispatch_next()
1950 gtid)); in __kmp_dispatch_next()
1964 pr->pushed_ws = __kmp_pop_workshare(gtid, pr->pushed_ws, loc); in __kmp_dispatch_next()
2000 KD_TRACE(1000, (buff, gtid, pr->u.p.ordered_lower, in __kmp_dispatch_next()
2027 KD_TRACE(10, (buff, gtid, *p_lb, *p_ub, *p_st, p_last, in __kmp_dispatch_next()
2054 status = sh->hier->next(loc, gtid, pr, &last, p_lb, p_ub, p_st); in __kmp_dispatch_next()
2057 status = __kmp_dispatch_next_algorithm<T>(gtid, pr, sh, &last, p_lb, p_ub, in __kmp_dispatch_next()
2072 KD_TRACE(10, (buff, gtid, sh->u.s.num_done)); in __kmp_dispatch_next()
2117 gtid, sh->buffer_index)); in __kmp_dispatch_next()
2124 pr->pushed_ws = __kmp_pop_workshare(gtid, pr->pushed_ws, loc); in __kmp_dispatch_next()
2150 KD_TRACE(10, (buff, gtid, *p_lb, *p_ub, p_st ? *p_st : 0, p_last, in __kmp_dispatch_next()
2164 static void __kmp_dist_get_bounds(ident_t *loc, kmp_int32 gtid, in __kmp_dist_get_bounds() argument
2175 KE_TRACE(10, ("__kmpc_dist_get_bounds called (%d)\n", gtid)); in __kmp_dist_get_bounds()
2185 KD_TRACE(100, (buff, gtid, *plastiter, *plower, *pupper, incr)); in __kmp_dist_get_bounds()
2208 __kmp_assert_valid_gtid(gtid); in __kmp_dist_get_bounds()
2209 th = __kmp_threads[gtid]; in __kmp_dist_get_bounds()
2301 void __kmpc_dispatch_init_4(ident_t *loc, kmp_int32 gtid, in __kmpc_dispatch_init_4() argument
2306 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_init_4()
2308 __kmp_dispatch_init<kmp_int32>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dispatch_init_4()
2313 void __kmpc_dispatch_init_4u(ident_t *loc, kmp_int32 gtid, in __kmpc_dispatch_init_4u() argument
2318 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_init_4u()
2320 __kmp_dispatch_init<kmp_uint32>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dispatch_init_4u()
2326 void __kmpc_dispatch_init_8(ident_t *loc, kmp_int32 gtid, in __kmpc_dispatch_init_8() argument
2331 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_init_8()
2333 __kmp_dispatch_init<kmp_int64>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dispatch_init_8()
2339 void __kmpc_dispatch_init_8u(ident_t *loc, kmp_int32 gtid, in __kmpc_dispatch_init_8u() argument
2344 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_init_8u()
2346 __kmp_dispatch_init<kmp_uint64>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dispatch_init_8u()
2358 void __kmpc_dist_dispatch_init_4(ident_t *loc, kmp_int32 gtid, in __kmpc_dist_dispatch_init_4() argument
2364 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dist_dispatch_init_4()
2366 __kmp_dist_get_bounds<kmp_int32>(loc, gtid, p_last, &lb, &ub, st); in __kmpc_dist_dispatch_init_4()
2367 __kmp_dispatch_init<kmp_int32>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dist_dispatch_init_4()
2370 void __kmpc_dist_dispatch_init_4u(ident_t *loc, kmp_int32 gtid, in __kmpc_dist_dispatch_init_4u() argument
2376 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dist_dispatch_init_4u()
2378 __kmp_dist_get_bounds<kmp_uint32>(loc, gtid, p_last, &lb, &ub, st); in __kmpc_dist_dispatch_init_4u()
2379 __kmp_dispatch_init<kmp_uint32>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dist_dispatch_init_4u()
2382 void __kmpc_dist_dispatch_init_8(ident_t *loc, kmp_int32 gtid, in __kmpc_dist_dispatch_init_8() argument
2388 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dist_dispatch_init_8()
2390 __kmp_dist_get_bounds<kmp_int64>(loc, gtid, p_last, &lb, &ub, st); in __kmpc_dist_dispatch_init_8()
2391 __kmp_dispatch_init<kmp_int64>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dist_dispatch_init_8()
2394 void __kmpc_dist_dispatch_init_8u(ident_t *loc, kmp_int32 gtid, in __kmpc_dist_dispatch_init_8u() argument
2400 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dist_dispatch_init_8u()
2402 __kmp_dist_get_bounds<kmp_uint64>(loc, gtid, p_last, &lb, &ub, st); in __kmpc_dist_dispatch_init_8u()
2403 __kmp_dispatch_init<kmp_uint64>(loc, gtid, schedule, lb, ub, st, chunk, true); in __kmpc_dist_dispatch_init_8u()
2419 int __kmpc_dispatch_next_4(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last, in __kmpc_dispatch_next_4() argument
2422 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_next_4()
2424 return __kmp_dispatch_next<kmp_int32>(loc, gtid, p_last, p_lb, p_ub, p_st in __kmpc_dispatch_next_4()
2427 OMPT_LOAD_RETURN_ADDRESS(gtid) in __kmpc_dispatch_next_4()
2435 int __kmpc_dispatch_next_4u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last, in __kmpc_dispatch_next_4u() argument
2439 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_next_4u()
2441 return __kmp_dispatch_next<kmp_uint32>(loc, gtid, p_last, p_lb, p_ub, p_st in __kmpc_dispatch_next_4u()
2444 OMPT_LOAD_RETURN_ADDRESS(gtid) in __kmpc_dispatch_next_4u()
2452 int __kmpc_dispatch_next_8(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last, in __kmpc_dispatch_next_8() argument
2455 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_next_8()
2457 return __kmp_dispatch_next<kmp_int64>(loc, gtid, p_last, p_lb, p_ub, p_st in __kmpc_dispatch_next_8()
2460 OMPT_LOAD_RETURN_ADDRESS(gtid) in __kmpc_dispatch_next_8()
2468 int __kmpc_dispatch_next_8u(ident_t *loc, kmp_int32 gtid, kmp_int32 *p_last, in __kmpc_dispatch_next_8u() argument
2472 OMPT_STORE_RETURN_ADDRESS(gtid); in __kmpc_dispatch_next_8u()
2474 return __kmp_dispatch_next<kmp_uint64>(loc, gtid, p_last, p_lb, p_ub, p_st in __kmpc_dispatch_next_8u()
2477 OMPT_LOAD_RETURN_ADDRESS(gtid) in __kmpc_dispatch_next_8u()
2488 void __kmpc_dispatch_fini_4(ident_t *loc, kmp_int32 gtid) { in __kmpc_dispatch_fini_4() argument
2489 __kmp_dispatch_finish<kmp_uint32>(gtid, loc); in __kmpc_dispatch_fini_4()
2495 void __kmpc_dispatch_fini_8(ident_t *loc, kmp_int32 gtid) { in __kmpc_dispatch_fini_8() argument
2496 __kmp_dispatch_finish<kmp_uint64>(gtid, loc); in __kmpc_dispatch_fini_8()
2502 void __kmpc_dispatch_fini_4u(ident_t *loc, kmp_int32 gtid) { in __kmpc_dispatch_fini_4u() argument
2503 __kmp_dispatch_finish<kmp_uint32>(gtid, loc); in __kmpc_dispatch_fini_4u()
2509 void __kmpc_dispatch_fini_8u(ident_t *loc, kmp_int32 gtid) { in __kmpc_dispatch_fini_8u() argument
2510 __kmp_dispatch_finish<kmp_uint64>(gtid, loc); in __kmpc_dispatch_fini_8u()
2590 void __kmp_aux_dispatch_init_4(ident_t *loc, kmp_int32 gtid, in __kmp_aux_dispatch_init_4() argument
2594 __kmp_dispatch_init<kmp_int32>(loc, gtid, schedule, lb, ub, st, chunk, in __kmp_aux_dispatch_init_4()
2598 void __kmp_aux_dispatch_init_4u(ident_t *loc, kmp_int32 gtid, in __kmp_aux_dispatch_init_4u() argument
2602 __kmp_dispatch_init<kmp_uint32>(loc, gtid, schedule, lb, ub, st, chunk, in __kmp_aux_dispatch_init_4u()
2606 void __kmp_aux_dispatch_init_8(ident_t *loc, kmp_int32 gtid, in __kmp_aux_dispatch_init_8() argument
2610 __kmp_dispatch_init<kmp_int64>(loc, gtid, schedule, lb, ub, st, chunk, in __kmp_aux_dispatch_init_8()
2614 void __kmp_aux_dispatch_init_8u(ident_t *loc, kmp_int32 gtid, in __kmp_aux_dispatch_init_8u() argument
2618 __kmp_dispatch_init<kmp_uint64>(loc, gtid, schedule, lb, ub, st, chunk, in __kmp_aux_dispatch_init_8u()
2622 void __kmp_aux_dispatch_fini_chunk_4(ident_t *loc, kmp_int32 gtid) { in __kmp_aux_dispatch_fini_chunk_4() argument
2623 __kmp_dispatch_finish_chunk<kmp_uint32>(gtid, loc); in __kmp_aux_dispatch_fini_chunk_4()
2626 void __kmp_aux_dispatch_fini_chunk_8(ident_t *loc, kmp_int32 gtid) { in __kmp_aux_dispatch_fini_chunk_8() argument
2627 __kmp_dispatch_finish_chunk<kmp_uint64>(gtid, loc); in __kmp_aux_dispatch_fini_chunk_8()
2630 void __kmp_aux_dispatch_fini_chunk_4u(ident_t *loc, kmp_int32 gtid) { in __kmp_aux_dispatch_fini_chunk_4u() argument
2631 __kmp_dispatch_finish_chunk<kmp_uint32>(gtid, loc); in __kmp_aux_dispatch_fini_chunk_4u()
2634 void __kmp_aux_dispatch_fini_chunk_8u(ident_t *loc, kmp_int32 gtid) { in __kmp_aux_dispatch_fini_chunk_8u() argument
2635 __kmp_dispatch_finish_chunk<kmp_uint64>(gtid, loc); in __kmp_aux_dispatch_fini_chunk_8u()
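
For orientation, the entry points matched above form the dynamic-dispatch driver that compiler-generated code uses for a worksharing loop: __kmpc_dispatch_init_4 sets up the per-thread dispatch buffer, __kmpc_dispatch_next_4 hands out chunks until it returns 0, and __kmpc_dispatch_fini_4 forwards to __kmp_dispatch_finish on the ordered-loop path (see the fini entries above). The sketch below is a hand-written approximation of what a compiler might emit for a dynamic schedule, not code from this file: the names dynamic_loop and body are invented for illustration, the schedule parameter is declared as a plain kmp_int32 rather than the runtime's enum sched_type, and the value 35 for kmp_sch_dynamic_chunked is an assumption taken from the runtime's headers. The gtid threaded through every call is obtained from __kmpc_global_thread_num.

// Illustrative sketch only: driving the 32-bit dispatch entry points by hand,
// roughly what a compiler emits for
//   #pragma omp for schedule(dynamic, 4)
// inside an existing parallel region. Links against libomp.
extern "C" {
typedef int kmp_int32;
typedef struct ident ident_t; // opaque source-location descriptor
kmp_int32 __kmpc_global_thread_num(ident_t *loc);
void __kmpc_dispatch_init_4(ident_t *loc, kmp_int32 gtid, kmp_int32 schedule,
                            kmp_int32 lb, kmp_int32 ub, kmp_int32 st,
                            kmp_int32 chunk);
kmp_int32 __kmpc_dispatch_next_4(ident_t *loc, kmp_int32 gtid,
                                 kmp_int32 *p_last, kmp_int32 *p_lb,
                                 kmp_int32 *p_ub, kmp_int32 *p_st);
}

static void body(kmp_int32 i) { (void)i; /* per-iteration work */ }

static void dynamic_loop(ident_t *loc, kmp_int32 n) {
  // Global thread id used by every dispatch entry point listed above.
  kmp_int32 gtid = __kmpc_global_thread_num(loc);
  const kmp_int32 kmp_sch_dynamic_chunked = 35; // assumed enum value
  __kmpc_dispatch_init_4(loc, gtid, kmp_sch_dynamic_chunked,
                         /*lb=*/0, /*ub=*/n - 1, /*st=*/1, /*chunk=*/4);
  kmp_int32 last, lb, ub, st;
  // Each successful next call hands this thread one chunk [lb, ub].
  while (__kmpc_dispatch_next_4(loc, gtid, &last, &lb, &ub, &st)) {
    for (kmp_int32 i = lb; i <= ub; i += st)
      body(i);
  }
  // No __kmpc_dispatch_fini_4 here: that entry point is used on the ordered
  // path, where it forwards to __kmp_dispatch_finish as shown in the listing.
}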