• Home
  • Raw
  • Download

Lines matching references to kmp_int32

72 static kmp_int32 __kmp_get_tas_lock_owner(kmp_tas_lock_t *lck) {  in __kmp_get_tas_lock_owner()
81 __kmp_acquire_tas_lock_timed_template(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_tas_lock_timed_template()
91 kmp_int32 tas_free = KMP_LOCK_FREE(tas); in __kmp_acquire_tas_lock_timed_template()
92 kmp_int32 tas_busy = KMP_LOCK_BUSY(gtid + 1, tas); in __kmp_acquire_tas_lock_timed_template()
113 int __kmp_acquire_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_tas_lock()
120 kmp_int32 gtid) { in __kmp_acquire_tas_lock_with_checks()
132 int __kmp_test_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_test_tas_lock()
133 kmp_int32 tas_free = KMP_LOCK_FREE(tas); in __kmp_test_tas_lock()
134 kmp_int32 tas_busy = KMP_LOCK_BUSY(gtid + 1, tas); in __kmp_test_tas_lock()
144 kmp_int32 gtid) { in __kmp_test_tas_lock_with_checks()
153 int __kmp_release_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_release_tas_lock()
166 kmp_int32 gtid) { in __kmp_release_tas_lock_with_checks()
203 int __kmp_acquire_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_tas_lock()
218 kmp_int32 gtid) { in __kmp_acquire_nested_tas_lock_with_checks()
226 int __kmp_test_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_tas_lock()
243 kmp_int32 gtid) { in __kmp_test_nested_tas_lock_with_checks()
251 int __kmp_release_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_tas_lock()
263 kmp_int32 gtid) { in __kmp_release_nested_tas_lock_with_checks()
309 static kmp_int32 __kmp_get_futex_lock_owner(kmp_futex_lock_t *lck) { in __kmp_get_futex_lock_owner()
318 __kmp_acquire_futex_lock_timed_template(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_futex_lock_timed_template()
319 kmp_int32 gtid_code = (gtid + 1) << 1; in __kmp_acquire_futex_lock_timed_template()
334 kmp_int32 poll_val; in __kmp_acquire_futex_lock_timed_template()
340 kmp_int32 cond = KMP_LOCK_STRIP(poll_val) & 1; in __kmp_acquire_futex_lock_timed_template()
375 kmp_int32 rc; in __kmp_acquire_futex_lock_timed_template()
399 int __kmp_acquire_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_futex_lock()
406 kmp_int32 gtid) { in __kmp_acquire_futex_lock_with_checks()
418 int __kmp_test_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_test_futex_lock()
428 kmp_int32 gtid) { in __kmp_test_futex_lock_with_checks()
437 int __kmp_release_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_release_futex_lock()
446 kmp_int32 poll_val = KMP_XCHG_FIXED32(&(lck->lk.poll), KMP_LOCK_FREE(futex)); in __kmp_release_futex_lock()
470 kmp_int32 gtid) { in __kmp_release_futex_lock_with_checks()
507 int __kmp_acquire_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_futex_lock()
522 kmp_int32 gtid) { in __kmp_acquire_nested_futex_lock_with_checks()
530 int __kmp_test_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_futex_lock()
547 kmp_int32 gtid) { in __kmp_test_nested_futex_lock_with_checks()
555 int __kmp_release_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_futex_lock()
567 kmp_int32 gtid) { in __kmp_release_nested_futex_lock_with_checks()
608 static kmp_int32 __kmp_get_ticket_lock_owner(kmp_ticket_lock_t *lck) { in __kmp_get_ticket_lock_owner()
626 kmp_int32 gtid) { in __kmp_acquire_ticket_lock_timed_template()
645 int __kmp_acquire_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_ticket_lock()
652 kmp_int32 gtid) { in __kmp_acquire_ticket_lock_with_checks()
676 int __kmp_test_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_test_ticket_lock()
693 kmp_int32 gtid) { in __kmp_test_ticket_lock_with_checks()
716 int __kmp_release_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_release_ticket_lock()
732 kmp_int32 gtid) { in __kmp_release_ticket_lock_with_checks()
808 int __kmp_acquire_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_ticket_lock()
827 kmp_int32 gtid) { in __kmp_acquire_nested_ticket_lock_with_checks()
843 int __kmp_test_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_ticket_lock()
865 kmp_int32 gtid) { in __kmp_test_nested_ticket_lock_with_checks()
881 int __kmp_release_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_ticket_lock()
895 kmp_int32 gtid) { in __kmp_release_nested_ticket_lock_with_checks()
1039 static void __kmp_dump_queuing_lock(kmp_info_t *this_thr, kmp_int32 gtid, in __kmp_dump_queuing_lock()
1040 kmp_queuing_lock_t *lck, kmp_int32 head_id, in __kmp_dump_queuing_lock()
1041 kmp_int32 tail_id) { in __kmp_dump_queuing_lock()
1042 kmp_int32 t, i; in __kmp_dump_queuing_lock()
1075 static kmp_int32 __kmp_get_queuing_lock_owner(kmp_queuing_lock_t *lck) { in __kmp_get_queuing_lock_owner()
1089 kmp_int32 gtid) { in __kmp_acquire_queuing_lock_timed_template()
1091 volatile kmp_int32 *head_id_p = &lck->lk.head_id; in __kmp_acquire_queuing_lock_timed_template()
1092 volatile kmp_int32 *tail_id_p = &lck->lk.tail_id; in __kmp_acquire_queuing_lock_timed_template()
1094 kmp_int32 need_mf = 1; in __kmp_acquire_queuing_lock_timed_template()
1127 kmp_int32 enqueued; in __kmp_acquire_queuing_lock_timed_template()
1128 kmp_int32 head; in __kmp_acquire_queuing_lock_timed_template()
1129 kmp_int32 tail; in __kmp_acquire_queuing_lock_timed_template()
1180 kmp_int32 grabbed_lock; in __kmp_acquire_queuing_lock_timed_template()
1284 int __kmp_acquire_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_queuing_lock()
1293 kmp_int32 gtid) { in __kmp_acquire_queuing_lock_with_checks()
1311 int __kmp_test_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_queuing_lock()
1312 volatile kmp_int32 *head_id_p = &lck->lk.head_id; in __kmp_test_queuing_lock()
1313 kmp_int32 head; in __kmp_test_queuing_lock()
1345 kmp_int32 gtid) { in __kmp_test_queuing_lock_with_checks()
1362 int __kmp_release_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_queuing_lock()
1364 volatile kmp_int32 *head_id_p = &lck->lk.head_id; in __kmp_release_queuing_lock()
1365 volatile kmp_int32 *tail_id_p = &lck->lk.tail_id; in __kmp_release_queuing_lock()
1387 kmp_int32 dequeued; in __kmp_release_queuing_lock()
1388 kmp_int32 head; in __kmp_release_queuing_lock()
1389 kmp_int32 tail; in __kmp_release_queuing_lock()
1438 volatile kmp_int32 *waiting_id_p; in __kmp_release_queuing_lock()
1506 kmp_int32 gtid) { in __kmp_release_queuing_lock_with_checks()
1565 int __kmp_acquire_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_queuing_lock()
1584 kmp_int32 gtid) { in __kmp_acquire_nested_queuing_lock_with_checks()
1595 int __kmp_test_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_queuing_lock()
1614 kmp_int32 gtid) { in __kmp_test_nested_queuing_lock_with_checks()
1625 int __kmp_release_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_queuing_lock()
1640 kmp_int32 gtid) { in __kmp_release_nested_queuing_lock_with_checks()
1901 (kmp_int32)getpid()); in __kmp_open_stats_file()
2004 kmp_int32 gtid) { in __kmp_should_speculate()
2015 kmp_int32 gtid) { in __kmp_test_adaptive_lock_only()
2062 static int __kmp_test_adaptive_lock(kmp_adaptive_lock_t *lck, kmp_int32 gtid) { in __kmp_test_adaptive_lock()
2082 kmp_int32 gtid) { in __kmp_test_adaptive_lock_with_checks()
2108 kmp_int32 gtid) { in __kmp_acquire_adaptive_lock()
2141 kmp_int32 gtid) { in __kmp_acquire_adaptive_lock_with_checks()
2157 kmp_int32 gtid) { in __kmp_release_adaptive_lock()
2172 kmp_int32 gtid) { in __kmp_release_adaptive_lock_with_checks()
2227 static kmp_int32 __kmp_get_drdpa_lock_owner(kmp_drdpa_lock_t *lck) { in __kmp_get_drdpa_lock_owner()
2236 __kmp_acquire_drdpa_lock_timed_template(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_drdpa_lock_timed_template()
2368 int __kmp_acquire_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_drdpa_lock()
2375 kmp_int32 gtid) { in __kmp_acquire_drdpa_lock_with_checks()
2393 int __kmp_test_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_test_drdpa_lock()
2421 kmp_int32 gtid) { in __kmp_test_drdpa_lock_with_checks()
2438 int __kmp_release_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_release_drdpa_lock()
2453 kmp_int32 gtid) { in __kmp_release_drdpa_lock_with_checks()
2526 int __kmp_acquire_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_drdpa_lock()
2544 kmp_int32 gtid) { in __kmp_acquire_nested_drdpa_lock_with_checks()
2555 int __kmp_test_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_drdpa_lock()
2574 kmp_int32 gtid) { in __kmp_test_nested_drdpa_lock_with_checks()
2585 int __kmp_release_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_drdpa_lock()
2599 kmp_int32 gtid) { in __kmp_release_nested_drdpa_lock_with_checks()
2726 static void __kmp_acquire_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_hle_lock()
2741 kmp_int32 gtid) { in __kmp_acquire_hle_lock_with_checks()
2745 static int __kmp_release_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_release_hle_lock()
2754 kmp_int32 gtid) { in __kmp_release_hle_lock_with_checks()
2758 static int __kmp_test_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_test_hle_lock()
2763 kmp_int32 gtid) { in __kmp_test_hle_lock_with_checks()
2780 static void __kmp_acquire_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_rtm_lock()
2803 kmp_int32 gtid) { in __kmp_acquire_rtm_lock_with_checks()
2808 static int __kmp_release_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_rtm_lock()
2820 kmp_int32 gtid) { in __kmp_release_rtm_lock_with_checks()
2825 static int __kmp_test_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_rtm_lock()
2840 kmp_int32 gtid) { in __kmp_test_rtm_lock_with_checks()
2850 static int __kmp_set_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32);
2851 static int __kmp_unset_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32);
2852 static int __kmp_test_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32);
2854 kmp_int32);
2856 kmp_int32);
2858 kmp_int32);
2869 kmp_int32 gtid) { \
2923 0, (int (*)(kmp_dyna_lock_t *, kmp_int32))__kmp_##op##_##l##_lock,
2924 static int (*direct_set[])(kmp_dyna_lock_t *, kmp_int32) = {
2928 0, (int (*)(kmp_dyna_lock_t *, kmp_int32))__kmp_##op##_##l##_lock_with_checks,
2929 static int (*direct_set_check[])(kmp_dyna_lock_t *, kmp_int32) = {
2936 0, (int (*)(kmp_dyna_lock_t *, kmp_int32))__kmp_##op##_##l##_lock,
2937 static int (*direct_unset[])(kmp_dyna_lock_t *, kmp_int32) = {
2939 static int (*direct_test[])(kmp_dyna_lock_t *, kmp_int32) = {
2943 0, (int (*)(kmp_dyna_lock_t *, kmp_int32))__kmp_##op##_##l##_lock_with_checks,
2944 static int (*direct_unset_check[])(kmp_dyna_lock_t *, kmp_int32) = {
2947 static int (*direct_test_check[])(kmp_dyna_lock_t *, kmp_int32) = {
2953 int (**__kmp_direct_set)(kmp_dyna_lock_t *, kmp_int32) = 0;
2954 int (**__kmp_direct_unset)(kmp_dyna_lock_t *, kmp_int32) = 0;
2955 int (**__kmp_direct_test)(kmp_dyna_lock_t *, kmp_int32) = 0;
2975 (int (*)(kmp_user_lock_p, kmp_int32)) __kmp_##op##_##l##_##lock,
2977 kmp_int32) = {KMP_FOREACH_I_LOCK(expand, acquire)};
2980 (int (*)(kmp_user_lock_p, kmp_int32)) __kmp_##op##_##l##_##lock_with_checks,
2981 static int (*indirect_set_check[])(kmp_user_lock_p, kmp_int32) = {
2987 (int (*)(kmp_user_lock_p, kmp_int32)) __kmp_##op##_##l##_##lock,
2988 static int (*indirect_unset[])(kmp_user_lock_p, kmp_int32) = {
2991 kmp_int32) = {KMP_FOREACH_I_LOCK(expand, test)};
2994 (int (*)(kmp_user_lock_p, kmp_int32)) __kmp_##op##_##l##_##lock_with_checks,
2995 static int (*indirect_unset_check[])(kmp_user_lock_p, kmp_int32) = {
2997 static int (*indirect_test_check[])(kmp_user_lock_p, kmp_int32) = {
3003 int (**__kmp_indirect_set)(kmp_user_lock_p, kmp_int32) = 0;
3004 int (**__kmp_indirect_unset)(kmp_user_lock_p, kmp_int32) = 0;
3005 int (**__kmp_indirect_test)(kmp_user_lock_p, kmp_int32) = 0;
3032 kmp_int32 gtid, in __kmp_allocate_indirect_lock()
3159 static int __kmp_set_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32 gtid) { in __kmp_set_indirect_lock()
3164 static int __kmp_unset_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32 gtid) { in __kmp_unset_indirect_lock()
3169 static int __kmp_test_indirect_lock(kmp_dyna_lock_t *lock, kmp_int32 gtid) { in __kmp_test_indirect_lock()
3175 kmp_int32 gtid) { in __kmp_set_indirect_lock_with_checks()
3182 kmp_int32 gtid) { in __kmp_unset_indirect_lock_with_checks()
3189 kmp_int32 gtid) { in __kmp_test_indirect_lock_with_checks()
3198 kmp_int32 __kmp_get_user_lock_owner(kmp_user_lock_p lck, kmp_uint32 seq) { in __kmp_get_user_lock_owner()
3437 kmp_int32 (*__kmp_get_user_lock_owner_)(kmp_user_lock_p lck) = NULL;
3439 kmp_int32 gtid) = NULL;
3442 kmp_int32 gtid) = NULL;
3444 kmp_int32 gtid) = NULL;
3449 kmp_int32 gtid) = NULL;
3452 kmp_int32 gtid) = NULL;
3454 kmp_int32 gtid) = NULL;
3477 (kmp_int32(*)(kmp_user_lock_p))(&__kmp_get_tas_lock_owner); in __kmp_set_user_lock_vptrs()
3510 (kmp_int32(*)(kmp_user_lock_p))(&__kmp_get_futex_lock_owner); in __kmp_set_user_lock_vptrs()
3543 (kmp_int32(*)(kmp_user_lock_p))(&__kmp_get_ticket_lock_owner); in __kmp_set_user_lock_vptrs()
3577 (kmp_int32(*)(kmp_user_lock_p))(&__kmp_get_queuing_lock_owner); in __kmp_set_user_lock_vptrs()
3612 (kmp_int32(*)(kmp_user_lock_p))(&__kmp_get_queuing_lock_owner); in __kmp_set_user_lock_vptrs()
3646 (kmp_int32(*)(kmp_user_lock_p))(&__kmp_get_drdpa_lock_owner); in __kmp_set_user_lock_vptrs()
3746 kmp_user_lock_p __kmp_user_lock_allocate(void **user_lock, kmp_int32 gtid, in __kmp_user_lock_allocate()
3794 void __kmp_user_lock_free(void **user_lock, kmp_int32 gtid, in __kmp_user_lock_free()