Lines Matching defs:lck

72 static kmp_int32 __kmp_get_tas_lock_owner(kmp_tas_lock_t *lck) {  in __kmp_get_tas_lock_owner()
76 static inline bool __kmp_is_tas_lock_nestable(kmp_tas_lock_t *lck) { in __kmp_is_tas_lock_nestable()
81 __kmp_acquire_tas_lock_timed_template(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_tas_lock_timed_template()
113 int __kmp_acquire_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_tas_lock()
119 static int __kmp_acquire_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_acquire_tas_lock_with_checks()
132 int __kmp_test_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_test_tas_lock()
143 static int __kmp_test_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_test_tas_lock_with_checks()
153 int __kmp_release_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_release_tas_lock()
165 static int __kmp_release_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_release_tas_lock_with_checks()
183 void __kmp_init_tas_lock(kmp_tas_lock_t *lck) { in __kmp_init_tas_lock()
187 void __kmp_destroy_tas_lock(kmp_tas_lock_t *lck) { lck->lk.poll = 0; } in __kmp_destroy_tas_lock()
189 static void __kmp_destroy_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_destroy_tas_lock_with_checks()
203 int __kmp_acquire_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_tas_lock()
217 static int __kmp_acquire_nested_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_acquire_nested_tas_lock_with_checks()
226 int __kmp_test_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_tas_lock()
242 static int __kmp_test_nested_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_test_nested_tas_lock_with_checks()
251 int __kmp_release_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_tas_lock()
262 static int __kmp_release_nested_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_release_nested_tas_lock_with_checks()
278 void __kmp_init_nested_tas_lock(kmp_tas_lock_t *lck) { in __kmp_init_nested_tas_lock()
283 void __kmp_destroy_nested_tas_lock(kmp_tas_lock_t *lck) { in __kmp_destroy_nested_tas_lock()
288 static void __kmp_destroy_nested_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_destroy_nested_tas_lock_with_checks()
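The group above indexes the test-and-set (TAS) lock entry points: acquire, test (try-lock), release, init, and destroy, each in plain, nested, and _with_checks variants. As a minimal sketch of the underlying technique (illustrative only; the runtime's kmp_tas_lock_t keeps the owner recoverable from lk.poll, as __kmp_get_tas_lock_owner above implies), a TAS lock needs just one atomic word:

```cpp
// Minimal test-and-set lock sketch in portable C++11.
// Illustrative tas_lock only: the real kmp_tas_lock_t encodes the owner
// in lk.poll; here 0 simply means free and 1 means held.
#include <atomic>

struct tas_lock {
  std::atomic<int> poll{0};

  void acquire() {
    // Spin until the exchange observes the lock free (0 -> 1).
    while (poll.exchange(1, std::memory_order_acquire) != 0) {
      // Re-check with plain loads to avoid hammering the cache line.
      while (poll.load(std::memory_order_relaxed) != 0) { /* spin */ }
    }
  }

  bool test() {  // non-blocking try-lock
    return poll.load(std::memory_order_relaxed) == 0 &&
           poll.exchange(1, std::memory_order_acquire) == 0;
  }

  void release() { poll.store(0, std::memory_order_release); }
};
```

The nested variants in the listing additionally track a recursion depth so the owning thread can re-acquire the lock without deadlocking.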
309 static kmp_int32 __kmp_get_futex_lock_owner(kmp_futex_lock_t *lck) { in __kmp_get_futex_lock_owner()
313 static inline bool __kmp_is_futex_lock_nestable(kmp_futex_lock_t *lck) { in __kmp_is_futex_lock_nestable()
318 __kmp_acquire_futex_lock_timed_template(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_futex_lock_timed_template()
399 int __kmp_acquire_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_futex_lock()
405 static int __kmp_acquire_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_acquire_futex_lock_with_checks()
418 int __kmp_test_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_test_futex_lock()
427 static int __kmp_test_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_test_futex_lock_with_checks()
437 int __kmp_release_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_release_futex_lock()
469 static int __kmp_release_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_release_futex_lock_with_checks()
487 void __kmp_init_futex_lock(kmp_futex_lock_t *lck) { in __kmp_init_futex_lock()
491 void __kmp_destroy_futex_lock(kmp_futex_lock_t *lck) { lck->lk.poll = 0; } in __kmp_destroy_futex_lock()
493 static void __kmp_destroy_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_destroy_futex_lock_with_checks()
507 int __kmp_acquire_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_futex_lock()
521 static int __kmp_acquire_nested_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_acquire_nested_futex_lock_with_checks()
530 int __kmp_test_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_futex_lock()
546 static int __kmp_test_nested_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_test_nested_futex_lock_with_checks()
555 int __kmp_release_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_futex_lock()
566 static int __kmp_release_nested_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_release_nested_futex_lock_with_checks()
582 void __kmp_init_nested_futex_lock(kmp_futex_lock_t *lck) { in __kmp_init_nested_futex_lock()
587 void __kmp_destroy_nested_futex_lock(kmp_futex_lock_t *lck) { in __kmp_destroy_nested_futex_lock()
592 static void __kmp_destroy_nested_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_destroy_nested_futex_lock_with_checks()
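The futex lock group mirrors the TAS interface but parks contended waiters in the kernel instead of spinning (Linux only). A hedged sketch of that pattern, following the well-known three-state futex scheme; the runtime's kmp_futex_lock_t keeps the owner recoverable from its poll word (see __kmp_get_futex_lock_owner above), which this illustrative futex_lock does not attempt:

```cpp
// Futex-backed lock sketch for Linux: 0 = free, 1 = locked,
// 2 = locked with possible waiters (the classic three-state scheme).
#include <atomic>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

struct futex_lock {
  std::atomic<int> poll{0};

  long futex(int op, int val) {
    return syscall(SYS_futex, &poll, op, val, nullptr, nullptr, 0);
  }

  void acquire() {
    int c = 0;
    if (poll.compare_exchange_strong(c, 1, std::memory_order_acquire))
      return;                                   // uncontended fast path
    if (c != 2)
      c = poll.exchange(2, std::memory_order_acquire);
    while (c != 0) {                            // still held: sleep in the kernel
      futex(FUTEX_WAIT, 2);
      c = poll.exchange(2, std::memory_order_acquire);
    }
  }

  void release() {
    if (poll.exchange(0, std::memory_order_release) == 2)
      futex(FUTEX_WAKE, 1);                     // wake one waiter only if contended
  }
};
```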
608 static kmp_int32 __kmp_get_ticket_lock_owner(kmp_ticket_lock_t *lck) { in __kmp_get_ticket_lock_owner()
614 static inline bool __kmp_is_ticket_lock_nestable(kmp_ticket_lock_t *lck) { in __kmp_is_ticket_lock_nestable()
625 __kmp_acquire_ticket_lock_timed_template(kmp_ticket_lock_t *lck, in __kmp_acquire_ticket_lock_timed_template()
645 int __kmp_acquire_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_ticket_lock()
651 static int __kmp_acquire_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_acquire_ticket_lock_with_checks()
676 int __kmp_test_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_test_ticket_lock()
692 static int __kmp_test_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_test_ticket_lock_with_checks()
716 int __kmp_release_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_release_ticket_lock()
731 static int __kmp_release_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_release_ticket_lock_with_checks()
756 void __kmp_init_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_init_ticket_lock()
773 void __kmp_destroy_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_destroy_ticket_lock()
787 static void __kmp_destroy_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_destroy_ticket_lock_with_checks()
808 int __kmp_acquire_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_ticket_lock()
826 static int __kmp_acquire_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_acquire_nested_ticket_lock_with_checks()
843 int __kmp_test_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_ticket_lock()
864 static int __kmp_test_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_test_nested_ticket_lock_with_checks()
881 int __kmp_release_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_ticket_lock()
894 static int __kmp_release_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_release_nested_ticket_lock_with_checks()
917 void __kmp_init_nested_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_init_nested_ticket_lock()
924 void __kmp_destroy_nested_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_destroy_nested_ticket_lock()
931 __kmp_destroy_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_destroy_nested_ticket_lock_with_checks()
952 static const ident_t *__kmp_get_ticket_lock_location(kmp_ticket_lock_t *lck) { in __kmp_get_ticket_lock_location()
956 static void __kmp_set_ticket_lock_location(kmp_ticket_lock_t *lck, in __kmp_set_ticket_lock_location()
961 static kmp_lock_flags_t __kmp_get_ticket_lock_flags(kmp_ticket_lock_t *lck) { in __kmp_get_ticket_lock_flags()
965 static void __kmp_set_ticket_lock_flags(kmp_ticket_lock_t *lck, in __kmp_set_ticket_lock_flags()
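The ticket lock group adds FIFO fairness: each acquirer draws a ticket and waits for a "now serving" counter to reach it. The location/flags accessors at the end of the group attach an ident_t source location and lock flags to the lock, presumably for diagnostics. A minimal sketch of the ticketing scheme (the real kmp_ticket_lock_t also records owner and nesting depth for the nested/checked variants above):

```cpp
// Ticket lock sketch: FIFO ordering via two monotonically increasing counters.
#include <atomic>

struct ticket_lock {
  std::atomic<unsigned> next_ticket{0};   // next ticket to hand out
  std::atomic<unsigned> now_serving{0};   // ticket currently allowed in

  void acquire() {
    unsigned my_ticket = next_ticket.fetch_add(1, std::memory_order_relaxed);
    while (now_serving.load(std::memory_order_acquire) != my_ticket) {
      /* spin; a real implementation would pause or back off here */
    }
  }

  bool test() {  // try-lock: succeed only if no one is queued ahead of us
    unsigned serving = now_serving.load(std::memory_order_relaxed);
    unsigned expected = serving;
    return next_ticket.compare_exchange_strong(expected, serving + 1,
                                               std::memory_order_acquire);
  }

  void release() {
    now_serving.fetch_add(1, std::memory_order_release);   // admit the next ticket
  }
};
```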
1040 kmp_queuing_lock_t *lck, kmp_int32 head_id, in __kmp_dump_queuing_lock()
1075 static kmp_int32 __kmp_get_queuing_lock_owner(kmp_queuing_lock_t *lck) { in __kmp_get_queuing_lock_owner()
1079 static inline bool __kmp_is_queuing_lock_nestable(kmp_queuing_lock_t *lck) { in __kmp_is_queuing_lock_nestable()
1088 __kmp_acquire_queuing_lock_timed_template(kmp_queuing_lock_t *lck, in __kmp_acquire_queuing_lock_timed_template()
1284 int __kmp_acquire_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_queuing_lock()
1292 static int __kmp_acquire_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_acquire_queuing_lock_with_checks()
1311 int __kmp_test_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_queuing_lock()
1344 static int __kmp_test_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_test_queuing_lock_with_checks()
1362 int __kmp_release_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_queuing_lock()
1505 static int __kmp_release_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_release_queuing_lock_with_checks()
1525 void __kmp_init_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_init_queuing_lock()
1538 void __kmp_destroy_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_destroy_queuing_lock()
1549 static void __kmp_destroy_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_destroy_queuing_lock_with_checks()
1565 int __kmp_acquire_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_queuing_lock()
1583 __kmp_acquire_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_acquire_nested_queuing_lock_with_checks()
1595 int __kmp_test_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_queuing_lock()
1613 static int __kmp_test_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_test_nested_queuing_lock_with_checks()
1625 int __kmp_release_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_queuing_lock()
1639 __kmp_release_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_release_nested_queuing_lock_with_checks()
1658 void __kmp_init_nested_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_init_nested_queuing_lock()
1663 void __kmp_destroy_nested_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_destroy_nested_queuing_lock()
1669 __kmp_destroy_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_destroy_nested_queuing_lock_with_checks()
1685 static const ident_t *__kmp_get_queuing_lock_location(kmp_queuing_lock_t *lck) { in __kmp_get_queuing_lock_location()
1689 static void __kmp_set_queuing_lock_location(kmp_queuing_lock_t *lck, in __kmp_set_queuing_lock_location()
1694 static kmp_lock_flags_t __kmp_get_queuing_lock_flags(kmp_queuing_lock_t *lck) { in __kmp_get_queuing_lock_flags()
1698 static void __kmp_set_queuing_lock_flags(kmp_queuing_lock_t *lck, in __kmp_set_queuing_lock_flags()
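The queuing lock keeps an explicit waiter queue, linked by thread id (note the head_id parameter of __kmp_dump_queuing_lock above), so each waiter spins on its own flag rather than on a shared word. The textbook form of the same idea is the MCS list-based queue lock; a hedged sketch of that technique, not the runtime's id-linked layout:

```cpp
// MCS-style queue lock sketch: each waiter spins on a flag in its own node,
// so handoff is FIFO and the spinning stays on thread-local cache lines.
#include <atomic>

struct mcs_node {
  std::atomic<mcs_node*> next{nullptr};
  std::atomic<bool> locked{false};
};

struct mcs_lock {
  std::atomic<mcs_node*> tail{nullptr};

  void acquire(mcs_node* me) {
    me->next.store(nullptr, std::memory_order_relaxed);
    me->locked.store(true, std::memory_order_relaxed);
    mcs_node* prev = tail.exchange(me, std::memory_order_acq_rel);
    if (prev) {                          // queue was non-empty: link in and wait
      prev->next.store(me, std::memory_order_release);
      while (me->locked.load(std::memory_order_acquire)) { /* spin locally */ }
    }
  }

  void release(mcs_node* me) {
    mcs_node* succ = me->next.load(std::memory_order_acquire);
    if (!succ) {
      // No visible successor: try to swing the tail back to empty.
      mcs_node* expected = me;
      if (tail.compare_exchange_strong(expected, nullptr,
                                       std::memory_order_acq_rel))
        return;
      // A successor is mid-enqueue; wait for its link to appear.
      while (!(succ = me->next.load(std::memory_order_acquire))) { /* spin */ }
    }
    succ->locked.store(false, std::memory_order_release);   // hand the lock off
  }
};
```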
1823 kmp_adaptive_lock_info_t *lck = &liveLocks; in __kmp_init_speculative_stats() local
1837 static void __kmp_remember_lock(kmp_adaptive_lock_info_t *lck) { in __kmp_remember_lock()
1852 static void __kmp_forget_lock(kmp_adaptive_lock_info_t *lck) { in __kmp_forget_lock()
1863 static void __kmp_zero_speculative_stats(kmp_adaptive_lock_info_t *lck) { in __kmp_zero_speculative_stats()
1870 kmp_adaptive_lock_info_t *lck) { in __kmp_add_stats()
1881 static void __kmp_accumulate_speculative_stats(kmp_adaptive_lock_info_t *lck) { in __kmp_accumulate_speculative_stats()
1910 kmp_adaptive_lock_info_t *lck; in __kmp_print_speculative_stats() local
1961 #define KMP_INC_STAT(lck, stat) (lck->lk.adaptive.stats.stat++) argument
1963 #define KMP_INC_STAT(lck, stat) argument
1967 static inline bool __kmp_is_unlocked_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_is_unlocked_queuing_lock()
1985 __kmp_update_badness_after_success(kmp_adaptive_lock_t *lck) { in __kmp_update_badness_after_success()
1992 static __inline void __kmp_step_badness(kmp_adaptive_lock_t *lck) { in __kmp_step_badness()
2003 static __inline int __kmp_should_speculate(kmp_adaptive_lock_t *lck, in __kmp_should_speculate()
2014 static int __kmp_test_adaptive_lock_only(kmp_adaptive_lock_t *lck, in __kmp_test_adaptive_lock_only()
2062 static int __kmp_test_adaptive_lock(kmp_adaptive_lock_t *lck, kmp_int32 gtid) { in __kmp_test_adaptive_lock()
2081 static int __kmp_test_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck, in __kmp_test_adaptive_lock_with_checks()
2107 static void __kmp_acquire_adaptive_lock(kmp_adaptive_lock_t *lck, in __kmp_acquire_adaptive_lock()
2140 static void __kmp_acquire_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck, in __kmp_acquire_adaptive_lock_with_checks()
2156 static int __kmp_release_adaptive_lock(kmp_adaptive_lock_t *lck, in __kmp_release_adaptive_lock()
2171 static int __kmp_release_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck, in __kmp_release_adaptive_lock_with_checks()
2189 static void __kmp_init_adaptive_lock(kmp_adaptive_lock_t *lck) { in __kmp_init_adaptive_lock()
2202 static void __kmp_destroy_adaptive_lock(kmp_adaptive_lock_t *lck) { in __kmp_destroy_adaptive_lock()
2210 static void __kmp_destroy_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck) { in __kmp_destroy_adaptive_lock_with_checks()
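The adaptive lock layers hardware speculation on top of the queuing lock: __kmp_should_speculate consults a "badness" score that __kmp_step_badness raises after failed speculation and __kmp_update_badness_after_success lowers, while the stats helpers aggregate per-lock speculation counters. A hedged sketch of that speculate-then-fall-back shape using Intel RTM intrinsics (requires TSX hardware and -mrtm; the badness policy is deliberately simplified, and a std::mutex stands in for the embedded queuing lock):

```cpp
// Adaptive speculation sketch with Intel RTM. Assumption: simplified
// heuristic; the runtime keeps richer statistics and retry policy.
#include <immintrin.h>   // _xbegin/_xend/_xabort, needs -mrtm and TSX hardware
#include <atomic>
#include <mutex>

struct adaptive_lock {
  std::mutex fallback;                  // stands in for the embedded queuing lock
  std::atomic<bool> held{false};        // is the fallback lock really taken?
  std::atomic<unsigned> badness{0};     // 0 means "speculation looks profitable"

  void acquire() {
    if (badness.load(std::memory_order_relaxed) == 0) {    // should_speculate
      if (_xbegin() == _XBEGIN_STARTED) {
        // Put the lock word in the read set: a real acquire elsewhere aborts us.
        if (!held.load(std::memory_order_relaxed))
          return;                       // running speculatively "inside" the lock
        _xabort(0xff);                  // lock genuinely held: give up
      }
      badness.store(4, std::memory_order_relaxed);          // step_badness analogue
    }
    fallback.lock();                    // non-speculative slow path
    held.store(true, std::memory_order_relaxed);
  }

  void release() {
    if (!held.load(std::memory_order_relaxed)) {
      _xend();                          // we were speculating: commit
      unsigned b = badness.load(std::memory_order_relaxed);
      if (b) badness.store(b - 1, std::memory_order_relaxed);  // success lowers badness
    } else {
      held.store(false, std::memory_order_relaxed);
      fallback.unlock();
    }
  }
};
```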
2227 static kmp_int32 __kmp_get_drdpa_lock_owner(kmp_drdpa_lock_t *lck) { in __kmp_get_drdpa_lock_owner()
2231 static inline bool __kmp_is_drdpa_lock_nestable(kmp_drdpa_lock_t *lck) { in __kmp_is_drdpa_lock_nestable()
2236 __kmp_acquire_drdpa_lock_timed_template(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_drdpa_lock_timed_template()
2368 int __kmp_acquire_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_drdpa_lock()
2374 static int __kmp_acquire_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_acquire_drdpa_lock_with_checks()
2393 int __kmp_test_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_test_drdpa_lock()
2420 static int __kmp_test_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_test_drdpa_lock_with_checks()
2438 int __kmp_release_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_release_drdpa_lock()
2452 static int __kmp_release_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_release_drdpa_lock_with_checks()
2473 void __kmp_init_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_init_drdpa_lock()
2490 void __kmp_destroy_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_destroy_drdpa_lock()
2510 static void __kmp_destroy_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_destroy_drdpa_lock_with_checks()
2526 int __kmp_acquire_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_drdpa_lock()
2543 static void __kmp_acquire_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_acquire_nested_drdpa_lock_with_checks()
2555 int __kmp_test_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_drdpa_lock()
2573 static int __kmp_test_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_test_nested_drdpa_lock_with_checks()
2585 int __kmp_release_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_drdpa_lock()
2598 static int __kmp_release_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_release_nested_drdpa_lock_with_checks()
2617 void __kmp_init_nested_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_init_nested_drdpa_lock()
2622 void __kmp_destroy_nested_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_destroy_nested_drdpa_lock()
2627 static void __kmp_destroy_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_destroy_nested_drdpa_lock_with_checks()
2643 static const ident_t *__kmp_get_drdpa_lock_location(kmp_drdpa_lock_t *lck) { in __kmp_get_drdpa_lock_location()
2647 static void __kmp_set_drdpa_lock_location(kmp_drdpa_lock_t *lck, in __kmp_set_drdpa_lock_location()
2652 static kmp_lock_flags_t __kmp_get_drdpa_lock_flags(kmp_drdpa_lock_t *lck) { in __kmp_get_drdpa_lock_flags()
2656 static void __kmp_set_drdpa_lock_flags(kmp_drdpa_lock_t *lck, in __kmp_set_drdpa_lock_flags()
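The DRDPA group repeats the same acquire/test/release/init/destroy shape; the name (dynamically reconfigurable distributed polling area) suggests each waiter polls its own slot of a resizable area, so handoff touches a single cache line. A hedged sketch of the fixed-size version of that idea, an array-based queue lock (the illustrative array_queue_lock below does not resize, unlike the real implementation):

```cpp
// Array-based queue lock sketch: the waiter holding ticket t spins on
// slots[t % N]. Assumption: fixed capacity N >= maximum concurrent threads.
#include <atomic>
#include <cstdint>
#include <cstddef>

template <std::size_t N>
struct array_queue_lock {
  struct alignas(64) slot { std::atomic<bool> go{false}; };  // one cache line each
  slot slots[N];
  std::atomic<std::uint64_t> next_ticket{0};

  array_queue_lock() { slots[0].go.store(true); }            // first ticket may enter

  std::uint64_t acquire() {
    std::uint64_t t = next_ticket.fetch_add(1, std::memory_order_relaxed);
    while (!slots[t % N].go.load(std::memory_order_acquire)) { /* spin locally */ }
    slots[t % N].go.store(false, std::memory_order_relaxed);  // consume the grant
    return t;                          // caller keeps its ticket for release
  }

  void release(std::uint64_t t) {
    slots[(t + 1) % N].go.store(true, std::memory_order_release);  // hand off
  }
};
```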
2701 static void __kmp_init_direct_lock(kmp_dyna_lock_t *lck, in __kmp_init_direct_lock()
2720 static void __kmp_destroy_hle_lock(kmp_dyna_lock_t *lck) { TCW_4(*lck, 0); } in __kmp_destroy_hle_lock()
2722 static void __kmp_destroy_hle_lock_with_checks(kmp_dyna_lock_t *lck) { in __kmp_destroy_hle_lock_with_checks()
2726 static void __kmp_acquire_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_hle_lock()
2740 static void __kmp_acquire_hle_lock_with_checks(kmp_dyna_lock_t *lck, in __kmp_acquire_hle_lock_with_checks()
2745 static int __kmp_release_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_release_hle_lock()
2753 static int __kmp_release_hle_lock_with_checks(kmp_dyna_lock_t *lck, in __kmp_release_hle_lock_with_checks()
2758 static int __kmp_test_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_test_hle_lock()
2762 static int __kmp_test_hle_lock_with_checks(kmp_dyna_lock_t *lck, in __kmp_test_hle_lock_with_checks()
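The HLE entries operate on a bare kmp_dyna_lock_t word and rely on hardware lock elision: XACQUIRE/XRELEASE-prefixed atomics let the CPU run the critical section transactionally and fall back to really taking the lock on abort. A hedged sketch of the generic elision idiom using GCC's HLE memory-order bits (x86 with -mhle; not the runtime's exact spin or back-off policy):

```cpp
// HLE elision sketch using GCC's __ATOMIC_HLE_* bits. On hardware without
// TSX the prefixes are ignored and this degrades to a plain spin lock.
#include <immintrin.h>   // _mm_pause

void hle_acquire(int *lck) {
  while (__atomic_exchange_n(lck, 1, __ATOMIC_ACQUIRE | __ATOMIC_HLE_ACQUIRE)) {
    // Wait for the word to look free before retrying the elided exchange.
    while (*(volatile int *)lck)
      _mm_pause();
  }
}

void hle_release(int *lck) {
  __atomic_store_n(lck, 0, __ATOMIC_RELEASE | __ATOMIC_HLE_RELEASE);
}
```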
2767 static void __kmp_init_rtm_lock(kmp_queuing_lock_t *lck) { in __kmp_init_rtm_lock()
2771 static void __kmp_destroy_rtm_lock(kmp_queuing_lock_t *lck) { in __kmp_destroy_rtm_lock()
2775 static void __kmp_destroy_rtm_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_destroy_rtm_lock_with_checks()
2780 static void __kmp_acquire_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_rtm_lock()
2802 static void __kmp_acquire_rtm_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_acquire_rtm_lock_with_checks()
2808 static int __kmp_release_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_rtm_lock()
2819 static int __kmp_release_rtm_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_release_rtm_lock_with_checks()
2825 static int __kmp_test_rtm_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_rtm_lock()
2839 static int __kmp_test_rtm_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_test_rtm_lock_with_checks()
3034 kmp_indirect_lock_t *lck; in __kmp_allocate_indirect_lock() local
3092 kmp_indirect_lock_t *lck = NULL; in __kmp_lookup_indirect_lock() local
3198 kmp_int32 __kmp_get_user_lock_owner(kmp_user_lock_p lck, kmp_uint32 seq) { in __kmp_get_user_lock_owner()
3367 static void __kmp_init_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_init_tas_lock_with_checks()
3371 static void __kmp_init_nested_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_init_nested_tas_lock_with_checks()
3376 static void __kmp_init_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_init_futex_lock_with_checks()
3380 static void __kmp_init_nested_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_init_nested_futex_lock_with_checks()
3385 static int __kmp_is_ticket_lock_initialized(kmp_ticket_lock_t *lck) { in __kmp_is_ticket_lock_initialized()
3389 static void __kmp_init_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_init_ticket_lock_with_checks()
3393 static void __kmp_init_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_init_nested_ticket_lock_with_checks()
3397 static int __kmp_is_queuing_lock_initialized(kmp_queuing_lock_t *lck) { in __kmp_is_queuing_lock_initialized()
3401 static void __kmp_init_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_init_queuing_lock_with_checks()
3406 __kmp_init_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_init_nested_queuing_lock_with_checks()
3411 static void __kmp_init_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck) { in __kmp_init_adaptive_lock_with_checks()
3416 static int __kmp_is_drdpa_lock_initialized(kmp_drdpa_lock_t *lck) { in __kmp_is_drdpa_lock_initialized()
3420 static void __kmp_init_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_init_drdpa_lock_with_checks()
3424 static void __kmp_init_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_init_nested_drdpa_lock_with_checks()
3687 static kmp_lock_index_t __kmp_lock_table_insert(kmp_user_lock_p lck) { in __kmp_lock_table_insert()
3748 kmp_user_lock_p lck; in __kmp_user_lock_allocate() local
3795 kmp_user_lock_p lck) { in __kmp_user_lock_free()
3813 kmp_user_lock_p lck = NULL; in __kmp_lookup_user_lock() local
3849 #define IS_CRITICAL(lck) \ in __kmp_cleanup_user_locks() argument
3881 kmp_user_lock_p lck = in __kmp_cleanup_user_locks() local
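The final group manages the global user-lock table: __kmp_lock_table_insert hands out an index for a freshly allocated lock, __kmp_lookup_user_lock maps an index or pointer back to the lock, __kmp_user_lock_free returns it for reuse, and __kmp_cleanup_user_locks tears everything down (treating locks flagged as critical-section locks specially, per the IS_CRITICAL macro). A hedged sketch of the indexing idea only; the hypothetical lock_table below is simplified, whereas the real table reuses freed slots and grows under the runtime's own locking:

```cpp
// Growable lock-table sketch: locks are referred to by small indices.
// Assumption: slot 0 is reserved so index 0 can mean "no lock".
#include <cstddef>
#include <mutex>
#include <vector>

struct user_lock;                            // opaque lock object
using lock_index_t = std::size_t;

class lock_table {
  std::mutex guard;
  std::vector<user_lock *> slots{nullptr};   // slot 0 reserved

public:
  lock_index_t insert(user_lock *lck) {
    std::lock_guard<std::mutex> g(guard);
    slots.push_back(lck);                    // table grows as needed
    return slots.size() - 1;                 // index handed back to the caller
  }

  user_lock *lookup(lock_index_t idx) {
    std::lock_guard<std::mutex> g(guard);
    return idx < slots.size() ? slots[idx] : nullptr;
  }

  void remove(lock_index_t idx) {
    std::lock_guard<std::mutex> g(guard);
    if (idx && idx < slots.size())
      slots[idx] = nullptr;                  // a real free list would recycle this slot
  }
};
```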