Lines matching refs:kmp_int32 in kmp_lock.h (LLVM OpenMP runtime)
123 std::atomic<kmp_int32> poll;
124 kmp_int32 depth_locked; // depth locked, for nested locks only
144 extern int __kmp_acquire_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
145 extern int __kmp_test_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
146 extern int __kmp_release_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
150 extern int __kmp_acquire_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
151 extern int __kmp_test_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
152 extern int __kmp_release_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid);
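The fields at lines 123-124 (an atomic poll word plus a depth_locked counter used only by the nested variants) and the six entry points above make up the test-and-set lock; every entry point takes the acquiring thread's global thread id (gtid). Below is a minimal self-contained analogue of how such a lock behaves. It is a sketch rather than libomp's implementation: the type and function names are illustrative, and the nested variants' bookkeeping in depth_locked is shown only as a field.

```cpp
// Illustrative analogue of a test-and-set lock shaped like kmp_tas_lock_t:
// an atomic poll word (0 = unlocked, gtid+1 = owner) plus a nesting counter.
// Not the libomp code; a sketch of the technique only.
#include <atomic>
#include <cstdint>

struct tas_lock_t {
  std::atomic<int32_t> poll{0}; // 0 => unlocked, gtid+1 => owning thread
  int32_t depth_locked{-1};     // >= 0 only when used as a nested lock
};

// Spin until the poll word moves from "unlocked" to "owned by gtid".
static int acquire_tas_lock(tas_lock_t *lck, int32_t gtid) {
  int32_t unlocked = 0;
  while (!lck->poll.compare_exchange_weak(unlocked, gtid + 1,
                                          std::memory_order_acquire))
    unlocked = 0; // the failed CAS overwrote the expected value; reset it
  return 0;
}

// One attempt only: returns 1 if the lock was taken, 0 if it was already held.
static int test_tas_lock(tas_lock_t *lck, int32_t gtid) {
  int32_t unlocked = 0;
  return lck->poll.compare_exchange_strong(unlocked, gtid + 1,
                                           std::memory_order_acquire);
}

static int release_tas_lock(tas_lock_t *lck, int32_t /*gtid*/) {
  lck->poll.store(0, std::memory_order_release);
  return 0;
}
```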
179 volatile kmp_int32 poll; // KMP_LOCK_FREE(futex) => unlocked
182 kmp_int32 depth_locked; // depth locked, for nested locks only
203 extern int __kmp_acquire_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
204 extern int __kmp_test_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
205 extern int __kmp_release_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
210 kmp_int32 gtid);
211 extern int __kmp_test_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid);
213 kmp_int32 gtid);
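The futex lock (lines 179-213) exposes the same acquire/test/release shape, but its poll word is a plain volatile kmp_int32 so contended waiters can sleep in the kernel via the Linux futex syscall instead of spinning, which pays off when hold times are long. The Linux-only sketch below shows the classic three-state futex mutex (0 free, 1 held, 2 held with possible waiters); the runtime's version differs in detail (it encodes the owning gtid and a waiter bit in the poll word), so treat this purely as an illustration of the futex technique.

```cpp
// Classic three-state futex mutex (after Drepper's "Futexes Are Tricky"):
// 0 = free, 1 = held with no waiters, 2 = held and someone may be sleeping.
// Linux-only sketch of the technique; not libomp's __kmp_*_futex_lock code.
#include <atomic>
#include <cstdint>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

struct futex_lock_t {
  std::atomic<int32_t> poll{0};
};

static long futex_wait(std::atomic<int32_t> *addr, int32_t expected) {
  // Sleeps only if *addr still equals `expected` when the kernel checks it.
  return syscall(SYS_futex, reinterpret_cast<int32_t *>(addr), FUTEX_WAIT,
                 expected, nullptr, nullptr, 0);
}

static long futex_wake(std::atomic<int32_t> *addr, int32_t nthreads) {
  return syscall(SYS_futex, reinterpret_cast<int32_t *>(addr), FUTEX_WAKE,
                 nthreads, nullptr, nullptr, 0);
}

static void acquire_futex_lock(futex_lock_t *lck) {
  int32_t c = 0;
  // Fast path: uncontended 0 -> 1.
  if (lck->poll.compare_exchange_strong(c, 1, std::memory_order_acquire))
    return;
  // Slow path: advertise a waiter (state 2) and sleep until the word frees up.
  if (c != 2)
    c = lck->poll.exchange(2, std::memory_order_acquire);
  while (c != 0) {
    futex_wait(&lck->poll, 2);
    c = lck->poll.exchange(2, std::memory_order_acquire);
  }
}

static void release_futex_lock(futex_lock_t *lck) {
  // If the word said "waiters possible", wake one of them.
  if (lck->poll.exchange(0, std::memory_order_release) == 2)
    futex_wake(&lck->poll, 1);
}
```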
286 extern int __kmp_acquire_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid);
287 extern int __kmp_test_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid);
289 kmp_int32 gtid);
290 extern int __kmp_release_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid);
295 kmp_int32 gtid);
297 kmp_int32 gtid);
299 kmp_int32 gtid);
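A ticket lock grants the lock in FIFO order: each arriving thread atomically takes a ticket and spins until a shared now-serving counter reaches it. The entry points at lines 286-299 follow the same acquire/test/release pattern as the other kinds, and the runtime's kmp_ticket_lock_t also records the owner and a nesting depth for the nested variants. The sketch below is a self-contained illustration of the ticket mechanism only, with illustrative names, not the libomp code.

```cpp
// Minimal FIFO ticket lock: take a ticket, wait until it is being served.
// Illustration of the technique only; not libomp's kmp_ticket_lock_t.
#include <atomic>
#include <cstdint>
#include <thread>

struct ticket_lock_t {
  std::atomic<uint32_t> next_ticket{0}; // ticket handed to the next arrival
  std::atomic<uint32_t> now_serving{0}; // ticket currently allowed to proceed
};

static void acquire_ticket_lock(ticket_lock_t *lck) {
  uint32_t mine = lck->next_ticket.fetch_add(1, std::memory_order_relaxed);
  while (lck->now_serving.load(std::memory_order_acquire) != mine)
    std::this_thread::yield(); // wait for this ticket's turn, in FIFO order
}

// Try-lock: only take a ticket if it would be served immediately.
// A concurrent arrival can make the CAS fail, which simply reports "busy".
static int test_ticket_lock(ticket_lock_t *lck) {
  uint32_t serving = lck->now_serving.load(std::memory_order_relaxed);
  uint32_t expected = serving;
  return lck->next_ticket.compare_exchange_strong(expected, serving + 1,
                                                  std::memory_order_acquire);
}

static void release_ticket_lock(ticket_lock_t *lck) {
  lck->now_serving.fetch_add(1, std::memory_order_release);
}
```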
363 volatile kmp_int32
366 volatile kmp_int32
374 volatile kmp_int32 owner_id; // (gtid+1) of owning thread, 0 if unlocked
375 kmp_int32 depth_locked; // depth locked, for nested locks only
394 extern int __kmp_acquire_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid);
395 extern int __kmp_test_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid);
396 extern int __kmp_release_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid);
401 kmp_int32 gtid);
403 kmp_int32 gtid);
405 kmp_int32 gtid);
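In the queuing lock, the two bare volatile kmp_int32 words at lines 363 and 366 are the head and tail of the wait queue in the runtime source (stored as gtid+1, 0 when empty), alongside owner_id and depth_locked; waiters queue up and each spins on its own flag rather than on one shared word, which keeps contended acquisition from saturating a single cache line. The sketch below shows the same queueing idea in its textbook MCS form with explicit per-waiter nodes; it illustrates the technique and is not the runtime's gtid-indexed implementation.

```cpp
// Textbook MCS queue lock: waiters form a linked list and each spins on a
// flag in its own node, so there is no contention on a single poll word.
// Illustration of the queueing technique; not libomp's kmp_queuing_lock_t.
#include <atomic>

struct mcs_node {
  std::atomic<mcs_node *> next{nullptr};
  std::atomic<bool> locked{false};
};

struct mcs_lock {
  std::atomic<mcs_node *> tail{nullptr}; // last waiter, nullptr if lock free
};

static void acquire_mcs_lock(mcs_lock *lck, mcs_node *me) {
  me->next.store(nullptr, std::memory_order_relaxed);
  me->locked.store(true, std::memory_order_relaxed);
  mcs_node *prev = lck->tail.exchange(me, std::memory_order_acq_rel);
  if (prev != nullptr) {
    prev->next.store(me, std::memory_order_release); // link behind predecessor
    while (me->locked.load(std::memory_order_acquire))
      ; // spin on our own node only
  }
}

static void release_mcs_lock(mcs_lock *lck, mcs_node *me) {
  mcs_node *succ = me->next.load(std::memory_order_acquire);
  if (succ == nullptr) {
    mcs_node *expected = me;
    if (lck->tail.compare_exchange_strong(expected, nullptr,
                                          std::memory_order_acq_rel))
      return; // no successor: the lock is now free
    while ((succ = me->next.load(std::memory_order_acquire)) == nullptr)
      ; // a successor is mid-enqueue; wait for it to link itself
  }
  succ->locked.store(false, std::memory_order_release); // hand the lock over
}
```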
475 kmp_int32 depth_locked; // depth locked
491 extern int __kmp_acquire_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
492 extern int __kmp_test_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
493 extern int __kmp_release_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
498 kmp_int32 gtid);
499 extern int __kmp_test_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid);
501 kmp_int32 gtid);
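The drdpa lock also hands the lock over in ticket order, but each waiter polls its own slot of a polling area indexed by its ticket, so waiters sit on distinct locations, and the runtime can grow that polling area under contention. The fixed-size array-based queue lock below illustrates the distributed-polling idea under the simplifying assumption of a bounded number of concurrent waiters and no reconfiguration; names and sizes are illustrative only, and a real implementation would pad each slot to a cache line.

```cpp
// Fixed-size array-based queue lock: each waiter takes a ticket and spins on
// its own slot of a polling array (slot = ticket % NSLOTS). A sketch of the
// distributed-polling idea only; libomp's drdpa lock can also resize its
// polling area, which this sketch omits. NSLOTS must exceed the number of
// threads that can wait at once.
#include <atomic>
#include <cstdint>

constexpr uint64_t NSLOTS = 64;

struct polling_area_lock {
  std::atomic<uint64_t> next_ticket{0};
  std::atomic<uint64_t> serving[NSLOTS]; // serving[t % NSLOTS] == t => ticket t may enter
  polling_area_lock() {
    for (auto &s : serving)
      s.store(UINT64_MAX, std::memory_order_relaxed); // "no ticket served here"
    serving[0].store(0, std::memory_order_relaxed);   // ticket 0 starts unblocked
  }
};

// Returns the ticket; the caller hands it back to release_pa_lock().
static uint64_t acquire_pa_lock(polling_area_lock *lck) {
  uint64_t t = lck->next_ticket.fetch_add(1, std::memory_order_relaxed);
  while (lck->serving[t % NSLOTS].load(std::memory_order_acquire) != t)
    ; // spin on this ticket's own slot; other waiters poll other slots
  return t;
}

static void release_pa_lock(polling_area_lock *lck, uint64_t t) {
  // Publish the next ticket into its slot, unblocking exactly that waiter.
  lck->serving[(t + 1) % NSLOTS].store(t + 1, std::memory_order_release);
}
```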
555 static inline int __kmp_acquire_lock(kmp_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_lock()
559 static inline int __kmp_test_lock(kmp_lock_t *lck, kmp_int32 gtid) { in __kmp_test_lock()
563 static inline void __kmp_release_lock(kmp_lock_t *lck, kmp_int32 gtid) { in __kmp_release_lock()
625 extern kmp_int32 (*__kmp_get_user_lock_owner_)(kmp_user_lock_p lck);
627 static inline kmp_int32 __kmp_get_user_lock_owner(kmp_user_lock_p lck) { in __kmp_get_user_lock_owner()
633 kmp_int32 gtid);
669 kmp_int32 gtid) { in __kmp_acquire_user_lock_with_checks()
676 kmp_int32 gtid);
684 kmp_int32 gtid) { in __kmp_test_user_lock_with_checks()
702 kmp_int32 gtid) { in __kmp_test_user_lock_with_checks()
709 kmp_int32 gtid);
712 kmp_int32 gtid) { in __kmp_release_user_lock_with_checks()
741 kmp_int32 gtid);
780 __kmp_acquire_nested_user_lock_with_checks(kmp_user_lock_p lck, kmp_int32 gtid, in __kmp_acquire_nested_user_lock_with_checks()
788 kmp_int32 gtid);
792 kmp_int32 gtid) { in __kmp_test_nested_user_lock_with_checks()
821 kmp_int32 gtid) { in __kmp_test_nested_user_lock_with_checks()
828 kmp_int32 gtid);
832 kmp_int32 gtid) { in __kmp_release_nested_user_lock_with_checks()
909 kmp_user_lock_p, kmp_int32))__kmp_acquire##nest##kind##_##suffix; \
911 kmp_user_lock_p, kmp_int32))__kmp_release##nest##kind##_##suffix; \
913 kmp_user_lock_p, kmp_int32))__kmp_test##nest##kind##_##suffix; \
973 kmp_int32 gtid,
975 extern void __kmp_user_lock_free(void **user_lock, kmp_int32 gtid,
1126 extern int (**__kmp_direct_set)(kmp_dyna_lock_t *, kmp_int32);
1127 extern int (**__kmp_direct_unset)(kmp_dyna_lock_t *, kmp_int32);
1128 extern int (**__kmp_direct_test)(kmp_dyna_lock_t *, kmp_int32);
1134 extern int (**__kmp_indirect_set)(kmp_user_lock_p, kmp_int32);
1135 extern int (**__kmp_indirect_unset)(kmp_user_lock_p, kmp_int32);
1136 extern int (**__kmp_indirect_test)(kmp_user_lock_p, kmp_int32);
1178 __kmp_allocate_indirect_lock(void **, kmp_int32, kmp_indirect_locktag_t);
1248 extern kmp_int32 __kmp_get_user_lock_owner(kmp_user_lock_p, kmp_uint32);
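__kmp_direct_set / __kmp_direct_unset / __kmp_direct_test and their __kmp_indirect_* counterparts (lines 1126-1136) are tables of function pointers with a uniform (lock, gtid) signature: the runtime indexes the table with the lock's kind and calls through it, which is also the point of the casts in the macro fragments at lines 909-913. The toy below shows that dispatch pattern; the names (demo_lock, demo_set, ...) and tag values are hypothetical, not libomp's.

```cpp
// Toy version of dispatch through per-kind function-pointer tables with a
// uniform (lock, gtid) signature. Names and tags are hypothetical.
#include <cstdint>
#include <cstdio>

struct demo_lock { int32_t word = 0; };

static int set_noop(demo_lock *, int32_t)       { return 0; }
static int unset_noop(demo_lock *, int32_t)     { return 0; }
static int set_tas(demo_lock *l, int32_t gtid)  { l->word = gtid + 1; return 0; }
static int unset_tas(demo_lock *l, int32_t)     { l->word = 0; return 0; }

// One row per lock kind; the runtime indexes similar tables with a tag that
// identifies the lock's kind.
static int (*demo_set[])(demo_lock *, int32_t)   = {set_noop, set_tas};
static int (*demo_unset[])(demo_lock *, int32_t) = {unset_noop, unset_tas};

int main() {
  demo_lock l;
  int tag = 1;  // pretend the tag says "this lock is a TAS lock"
  int gtid = 7; // caller's global thread id
  demo_set[tag](&l, gtid); // generic call site; concrete kind chosen by the tag
  std::printf("owner word after set: %d\n", l.word);
  demo_unset[tag](&l, gtid);
  return 0;
}
```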