Lines Matching defs:lck
70 static kmp_int32 __kmp_get_tas_lock_owner(kmp_tas_lock_t *lck) { in __kmp_get_tas_lock_owner()
74 static inline bool __kmp_is_tas_lock_nestable(kmp_tas_lock_t *lck) { in __kmp_is_tas_lock_nestable()
79 __kmp_acquire_tas_lock_timed_template(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_tas_lock_timed_template()
118 int __kmp_acquire_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_tas_lock()
123 static int __kmp_acquire_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_acquire_tas_lock_with_checks()
136 int __kmp_test_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_test_tas_lock()
147 static int __kmp_test_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_test_tas_lock_with_checks()
157 int __kmp_release_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_release_tas_lock()
168 static int __kmp_release_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_release_tas_lock_with_checks()
186 void __kmp_init_tas_lock(kmp_tas_lock_t *lck) { in __kmp_init_tas_lock()
190 void __kmp_destroy_tas_lock(kmp_tas_lock_t *lck) { lck->lk.poll = 0; } in __kmp_destroy_tas_lock()
192 static void __kmp_destroy_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_destroy_tas_lock_with_checks()
206 int __kmp_acquire_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_tas_lock()
219 static int __kmp_acquire_nested_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_acquire_nested_tas_lock_with_checks()
228 int __kmp_test_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_tas_lock()
244 static int __kmp_test_nested_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_test_nested_tas_lock_with_checks()
253 int __kmp_release_nested_tas_lock(kmp_tas_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_tas_lock()
264 static int __kmp_release_nested_tas_lock_with_checks(kmp_tas_lock_t *lck, in __kmp_release_nested_tas_lock_with_checks()
280 void __kmp_init_nested_tas_lock(kmp_tas_lock_t *lck) { in __kmp_init_nested_tas_lock()
285 void __kmp_destroy_nested_tas_lock(kmp_tas_lock_t *lck) { in __kmp_destroy_nested_tas_lock()
290 static void __kmp_destroy_nested_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_destroy_nested_tas_lock_with_checks()
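The __kmp_*_tas_lock entries above are the plain and nested test-and-set (TAS) lock: a single poll word claimed with a compare-and-swap and cleared on release, with the nested variants adding a depth counter for re-acquisition by the owner. The sketch below mirrors that init/acquire/test/release/destroy split using std::atomic; all names (tas_lock, tas_acquire, ...) are made up for illustration, not the runtime's entry points, and the gtid consistency checks of the *_with_checks wrappers are omitted.

// Illustrative test-and-set lock, simplified from the __kmp_*_tas_lock shape.
#include <atomic>
#include <cassert>

struct tas_lock {
  std::atomic<int> poll{0};   // 0 = free, otherwise owner id + 1
  int depth = 0;              // nesting depth, used by the nested variants
};

void tas_init(tas_lock *lck) { lck->poll.store(0, std::memory_order_relaxed); }
void tas_destroy(tas_lock *lck) { lck->poll.store(0, std::memory_order_relaxed); }

void tas_acquire(tas_lock *lck, int gtid) {
  int expected = 0;
  // Spin until the compare-and-swap from "free" to "owned by gtid" succeeds.
  while (!lck->poll.compare_exchange_weak(expected, gtid + 1,
                                          std::memory_order_acquire,
                                          std::memory_order_relaxed))
    expected = 0;
}

int tas_test(tas_lock *lck, int gtid) {     // non-blocking acquire attempt
  int expected = 0;
  return lck->poll.compare_exchange_strong(expected, gtid + 1,
                                           std::memory_order_acquire);
}

void tas_release(tas_lock *lck, int /*gtid*/) {
  lck->poll.store(0, std::memory_order_release);
}

// Nested variants: re-acquisition by the owner just bumps the depth counter.
void tas_acquire_nested(tas_lock *lck, int gtid) {
  if (lck->poll.load(std::memory_order_relaxed) == gtid + 1) {
    ++lck->depth;
  } else {
    tas_acquire(lck, gtid);
    lck->depth = 1;
  }
}

int tas_release_nested(tas_lock *lck, int gtid) {
  assert(lck->poll.load(std::memory_order_relaxed) == gtid + 1);
  if (--lck->depth == 0) {
    tas_release(lck, gtid);
    return 1;                 // lock fully released
  }
  return 0;                   // still held by this thread
}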
311 static kmp_int32 __kmp_get_futex_lock_owner(kmp_futex_lock_t *lck) { in __kmp_get_futex_lock_owner()
315 static inline bool __kmp_is_futex_lock_nestable(kmp_futex_lock_t *lck) { in __kmp_is_futex_lock_nestable()
320 __kmp_acquire_futex_lock_timed_template(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_futex_lock_timed_template()
401 int __kmp_acquire_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_futex_lock()
406 static int __kmp_acquire_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_acquire_futex_lock_with_checks()
419 int __kmp_test_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_test_futex_lock()
428 static int __kmp_test_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_test_futex_lock_with_checks()
438 int __kmp_release_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_release_futex_lock()
469 static int __kmp_release_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_release_futex_lock_with_checks()
487 void __kmp_init_futex_lock(kmp_futex_lock_t *lck) { in __kmp_init_futex_lock()
491 void __kmp_destroy_futex_lock(kmp_futex_lock_t *lck) { lck->lk.poll = 0; } in __kmp_destroy_futex_lock()
493 static void __kmp_destroy_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_destroy_futex_lock_with_checks()
507 int __kmp_acquire_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_futex_lock()
520 static int __kmp_acquire_nested_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_acquire_nested_futex_lock_with_checks()
529 int __kmp_test_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_futex_lock()
545 static int __kmp_test_nested_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_test_nested_futex_lock_with_checks()
554 int __kmp_release_nested_futex_lock(kmp_futex_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_futex_lock()
565 static int __kmp_release_nested_futex_lock_with_checks(kmp_futex_lock_t *lck, in __kmp_release_nested_futex_lock_with_checks()
581 void __kmp_init_nested_futex_lock(kmp_futex_lock_t *lck) { in __kmp_init_nested_futex_lock()
586 void __kmp_destroy_nested_futex_lock(kmp_futex_lock_t *lck) { in __kmp_destroy_nested_futex_lock()
591 static void __kmp_destroy_nested_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_destroy_nested_futex_lock_with_checks()
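The __kmp_*_futex_lock entries are the futex-based lock, which sleeps in the kernel instead of spinning when contended. Below is a Linux-only sketch of the usual three-state futex protocol (0 free, 1 locked, 2 locked with waiters); the runtime's version additionally encodes the owner's gtid in the poll word, which is left out here, and all names are illustrative.

// Linux-only futex lock sketch (free = 0, locked = 1, contended = 2).
#include <atomic>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

static long futex(std::atomic<int> *addr, int op, int val) {
  return syscall(SYS_futex, reinterpret_cast<int *>(addr), op, val,
                 nullptr, nullptr, 0);
}

struct futex_lock { std::atomic<int> poll{0}; };

void futex_acquire(futex_lock *lck) {
  int c = 0;
  // Fast path: uncontended 0 -> 1.
  if (lck->poll.compare_exchange_strong(c, 1, std::memory_order_acquire))
    return;
  // Slow path: mark the lock contended (2) and sleep until it becomes free.
  if (c != 2)
    c = lck->poll.exchange(2, std::memory_order_acquire);
  while (c != 0) {
    futex(&lck->poll, FUTEX_WAIT_PRIVATE, 2);   // sleep while the word is 2
    c = lck->poll.exchange(2, std::memory_order_acquire);
  }
}

void futex_release(futex_lock *lck) {
  if (lck->poll.exchange(0, std::memory_order_release) == 2)
    futex(&lck->poll, FUTEX_WAKE_PRIVATE, 1);   // wake one sleeping waiter
}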
607 static kmp_int32 __kmp_get_ticket_lock_owner(kmp_ticket_lock_t *lck) { in __kmp_get_ticket_lock_owner()
613 static inline bool __kmp_is_ticket_lock_nestable(kmp_ticket_lock_t *lck) { in __kmp_is_ticket_lock_nestable()
624 __kmp_acquire_ticket_lock_timed_template(kmp_ticket_lock_t *lck, in __kmp_acquire_ticket_lock_timed_template()
644 int __kmp_acquire_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_ticket_lock()
649 static int __kmp_acquire_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_acquire_ticket_lock_with_checks()
674 int __kmp_test_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_test_ticket_lock()
690 static int __kmp_test_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_test_ticket_lock_with_checks()
714 int __kmp_release_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_release_ticket_lock()
728 static int __kmp_release_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_release_ticket_lock_with_checks()
753 void __kmp_init_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_init_ticket_lock()
770 void __kmp_destroy_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_destroy_ticket_lock()
784 static void __kmp_destroy_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_destroy_ticket_lock_with_checks()
805 int __kmp_acquire_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_ticket_lock()
822 static int __kmp_acquire_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_acquire_nested_ticket_lock_with_checks()
839 int __kmp_test_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_ticket_lock()
860 static int __kmp_test_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_test_nested_ticket_lock_with_checks()
877 int __kmp_release_nested_ticket_lock(kmp_ticket_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_ticket_lock()
890 static int __kmp_release_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck, in __kmp_release_nested_ticket_lock_with_checks()
913 void __kmp_init_nested_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_init_nested_ticket_lock()
920 void __kmp_destroy_nested_ticket_lock(kmp_ticket_lock_t *lck) { in __kmp_destroy_nested_ticket_lock()
927 __kmp_destroy_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_destroy_nested_ticket_lock_with_checks()
948 static const ident_t *__kmp_get_ticket_lock_location(kmp_ticket_lock_t *lck) { in __kmp_get_ticket_lock_location()
952 static void __kmp_set_ticket_lock_location(kmp_ticket_lock_t *lck, in __kmp_set_ticket_lock_location()
957 static kmp_lock_flags_t __kmp_get_ticket_lock_flags(kmp_ticket_lock_t *lck) { in __kmp_get_ticket_lock_flags()
961 static void __kmp_set_ticket_lock_flags(kmp_ticket_lock_t *lck, in __kmp_set_ticket_lock_flags()
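The __kmp_*_ticket_lock entries implement a ticket lock, which is FIFO-fair: each acquirer atomically takes the next ticket and waits until the "now serving" counter reaches it. A minimal sketch of that idea follows; the names are illustrative and the runtime's backoff, location, and flags bookkeeping are omitted.

// Illustrative ticket lock: fair, each acquirer spins on a shared counter.
#include <atomic>

struct ticket_lock {
  std::atomic<unsigned> next_ticket{0};
  std::atomic<unsigned> now_serving{0};
};

void ticket_acquire(ticket_lock *lck) {
  unsigned my_ticket = lck->next_ticket.fetch_add(1, std::memory_order_relaxed);
  while (lck->now_serving.load(std::memory_order_acquire) != my_ticket) {
    // spin (the runtime yields / backs off here)
  }
}

int ticket_test(ticket_lock *lck) {         // non-blocking attempt
  unsigned serving = lck->now_serving.load(std::memory_order_relaxed);
  unsigned expected = serving;
  // Only take a ticket if it would be served immediately.
  return lck->next_ticket.compare_exchange_strong(expected, serving + 1,
                                                  std::memory_order_acquire);
}

void ticket_release(ticket_lock *lck) {
  lck->now_serving.fetch_add(1, std::memory_order_release);
}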
1036 kmp_queuing_lock_t *lck, kmp_int32 head_id, in __kmp_dump_queuing_lock()
1071 static kmp_int32 __kmp_get_queuing_lock_owner(kmp_queuing_lock_t *lck) { in __kmp_get_queuing_lock_owner()
1075 static inline bool __kmp_is_queuing_lock_nestable(kmp_queuing_lock_t *lck) { in __kmp_is_queuing_lock_nestable()
1084 __kmp_acquire_queuing_lock_timed_template(kmp_queuing_lock_t *lck, in __kmp_acquire_queuing_lock_timed_template()
1277 int __kmp_acquire_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_queuing_lock()
1284 static int __kmp_acquire_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_acquire_queuing_lock_with_checks()
1303 int __kmp_test_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_queuing_lock()
1335 static int __kmp_test_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_test_queuing_lock_with_checks()
1353 int __kmp_release_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_queuing_lock()
1496 static int __kmp_release_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_release_queuing_lock_with_checks()
1516 void __kmp_init_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_init_queuing_lock()
1529 void __kmp_destroy_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_destroy_queuing_lock()
1540 static void __kmp_destroy_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_destroy_queuing_lock_with_checks()
1556 int __kmp_acquire_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_queuing_lock()
1573 __kmp_acquire_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_acquire_nested_queuing_lock_with_checks()
1585 int __kmp_test_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_queuing_lock()
1603 static int __kmp_test_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_test_nested_queuing_lock_with_checks()
1615 int __kmp_release_nested_queuing_lock(kmp_queuing_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_queuing_lock()
1629 __kmp_release_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_release_nested_queuing_lock_with_checks()
1648 void __kmp_init_nested_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_init_nested_queuing_lock()
1653 void __kmp_destroy_nested_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_destroy_nested_queuing_lock()
1659 __kmp_destroy_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_destroy_nested_queuing_lock_with_checks()
1675 static const ident_t *__kmp_get_queuing_lock_location(kmp_queuing_lock_t *lck) { in __kmp_get_queuing_lock_location()
1679 static void __kmp_set_queuing_lock_location(kmp_queuing_lock_t *lck, in __kmp_set_queuing_lock_location()
1684 static kmp_lock_flags_t __kmp_get_queuing_lock_flags(kmp_queuing_lock_t *lck) { in __kmp_get_queuing_lock_flags()
1688 static void __kmp_set_queuing_lock_flags(kmp_queuing_lock_t *lck, in __kmp_set_queuing_lock_flags()
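The __kmp_*_queuing_lock entries are the queuing lock: waiters link themselves into a queue and each one spins only on its own flag, so the release touches a single waiter's cache line. The runtime tracks head_id/tail_id indices into its thread table; the same shape is easier to see as a generic MCS-style lock, sketched below with illustrative names (this is not the runtime's exact scheme).

// MCS-style queue lock sketch: each waiter spins on a flag in its own node.
#include <atomic>

struct qnode {
  std::atomic<qnode *> next{nullptr};
  std::atomic<bool> locked{false};
};

struct queuing_lock { std::atomic<qnode *> tail{nullptr}; };

void queuing_acquire(queuing_lock *lck, qnode *me) {
  me->next.store(nullptr, std::memory_order_relaxed);
  me->locked.store(true, std::memory_order_relaxed);
  qnode *pred = lck->tail.exchange(me, std::memory_order_acq_rel);
  if (pred != nullptr) {               // someone ahead of us: enqueue and wait
    pred->next.store(me, std::memory_order_release);
    while (me->locked.load(std::memory_order_acquire)) { /* spin locally */ }
  }
}

void queuing_release(queuing_lock *lck, qnode *me) {
  qnode *succ = me->next.load(std::memory_order_acquire);
  if (succ == nullptr) {
    qnode *expected = me;
    // No known successor: try to swing the tail back to "empty".
    if (lck->tail.compare_exchange_strong(expected, nullptr,
                                          std::memory_order_acq_rel))
      return;
    // A successor is in the middle of linking itself in; wait for it.
    while ((succ = me->next.load(std::memory_order_acquire)) == nullptr) { }
  }
  succ->locked.store(false, std::memory_order_release); // hand the lock over
}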
1813 kmp_adaptive_lock_info_t *lck = &liveLocks; in __kmp_init_speculative_stats() local
1827 static void __kmp_remember_lock(kmp_adaptive_lock_info_t *lck) { in __kmp_remember_lock()
1842 static void __kmp_forget_lock(kmp_adaptive_lock_info_t *lck) { in __kmp_forget_lock()
1853 static void __kmp_zero_speculative_stats(kmp_adaptive_lock_info_t *lck) { in __kmp_zero_speculative_stats()
1860 kmp_adaptive_lock_info_t *lck) { in __kmp_add_stats()
1871 static void __kmp_accumulate_speculative_stats(kmp_adaptive_lock_info_t *lck) { in __kmp_accumulate_speculative_stats()
1886 kmp_adaptive_lock_info_t *lck; in __kmp_print_speculative_stats() local
1944 #define KMP_INC_STAT(lck, stat) (lck->lk.adaptive.stats.stat++) argument
1946 #define KMP_INC_STAT(lck, stat) argument
1950 static inline bool __kmp_is_unlocked_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_is_unlocked_queuing_lock()
1968 __kmp_update_badness_after_success(kmp_adaptive_lock_t *lck) { in __kmp_update_badness_after_success()
1975 static __inline void __kmp_step_badness(kmp_adaptive_lock_t *lck) { in __kmp_step_badness()
1986 static __inline int __kmp_should_speculate(kmp_adaptive_lock_t *lck, in __kmp_should_speculate()
1997 static int __kmp_test_adaptive_lock_only(kmp_adaptive_lock_t *lck, in __kmp_test_adaptive_lock_only()
2045 static int __kmp_test_adaptive_lock(kmp_adaptive_lock_t *lck, kmp_int32 gtid) { in __kmp_test_adaptive_lock()
2064 static int __kmp_test_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck, in __kmp_test_adaptive_lock_with_checks()
2090 static void __kmp_acquire_adaptive_lock(kmp_adaptive_lock_t *lck, in __kmp_acquire_adaptive_lock()
2122 static void __kmp_acquire_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck, in __kmp_acquire_adaptive_lock_with_checks()
2138 static int __kmp_release_adaptive_lock(kmp_adaptive_lock_t *lck, in __kmp_release_adaptive_lock()
2153 static int __kmp_release_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck, in __kmp_release_adaptive_lock_with_checks()
2171 static void __kmp_init_adaptive_lock(kmp_adaptive_lock_t *lck) { in __kmp_init_adaptive_lock()
2184 static void __kmp_destroy_adaptive_lock(kmp_adaptive_lock_t *lck) { in __kmp_destroy_adaptive_lock()
2192 static void __kmp_destroy_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck) { in __kmp_destroy_adaptive_lock_with_checks()
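The adaptive-lock entries (speculative stats, badness stepping, __kmp_should_speculate, and the acquire/test/release pair) first try to run the critical section speculatively with hardware transactional memory and fall back to the underlying queuing lock when speculation keeps aborting. The sketch below shows only that speculate-then-fall-back pattern with the RTM intrinsics from immintrin.h; it assumes x86 with -mrtm and TSX-capable hardware, replaces the queuing lock with a bare spin word, and drops the statistics, so it is a rough illustration rather than the runtime's algorithm.

// Sketch of the speculate-then-fall-back pattern behind the adaptive lock.
#include <immintrin.h>
#include <atomic>

struct adaptive_lock {
  std::atomic<int> poll{0};   // underlying lock word (0 = free)
  unsigned badness = 0;       // recent speculation failures
  unsigned max_badness = 16;  // speculate only while badness stays low
};

static bool try_speculation(adaptive_lock *lck) {
  if (lck->badness > lck->max_badness)
    return false;                          // too many aborts lately
  if (_xbegin() == _XBEGIN_STARTED) {
    // Inside the transaction the lock must look free; otherwise abort so we
    // serialize behind the real owner.
    if (lck->poll.load(std::memory_order_relaxed) == 0)
      return true;                         // run the critical section speculatively
    _xabort(0xff);
  }
  ++lck->badness;                          // abort path: speculation failed
  return false;
}

void adaptive_acquire(adaptive_lock *lck) {
  if (try_speculation(lck))
    return;                                // no lock actually taken
  int expected = 0;                        // fall back to the real lock
  while (!lck->poll.compare_exchange_weak(expected, 1,
                                          std::memory_order_acquire))
    expected = 0;
}

void adaptive_release(adaptive_lock *lck) {
  if (_xtest()) {                          // were we speculating?
    if (lck->badness) --lck->badness;      // success: decay the badness
    _xend();
  } else {
    lck->poll.store(0, std::memory_order_release);
  }
}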
2209 static kmp_int32 __kmp_get_drdpa_lock_owner(kmp_drdpa_lock_t *lck) { in __kmp_get_drdpa_lock_owner()
2213 static inline bool __kmp_is_drdpa_lock_nestable(kmp_drdpa_lock_t *lck) { in __kmp_is_drdpa_lock_nestable()
2218 __kmp_acquire_drdpa_lock_timed_template(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_drdpa_lock_timed_template()
2352 int __kmp_acquire_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_drdpa_lock()
2357 static int __kmp_acquire_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_acquire_drdpa_lock_with_checks()
2376 int __kmp_test_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_test_drdpa_lock()
2403 static int __kmp_test_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_test_drdpa_lock_with_checks()
2421 int __kmp_release_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_release_drdpa_lock()
2434 static int __kmp_release_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_release_drdpa_lock_with_checks()
2455 void __kmp_init_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_init_drdpa_lock()
2472 void __kmp_destroy_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_destroy_drdpa_lock()
2492 static void __kmp_destroy_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_destroy_drdpa_lock_with_checks()
2508 int __kmp_acquire_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_nested_drdpa_lock()
2524 static void __kmp_acquire_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_acquire_nested_drdpa_lock_with_checks()
2536 int __kmp_test_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_test_nested_drdpa_lock()
2554 static int __kmp_test_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_test_nested_drdpa_lock_with_checks()
2566 int __kmp_release_nested_drdpa_lock(kmp_drdpa_lock_t *lck, kmp_int32 gtid) { in __kmp_release_nested_drdpa_lock()
2579 static int __kmp_release_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck, in __kmp_release_nested_drdpa_lock_with_checks()
2598 void __kmp_init_nested_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_init_nested_drdpa_lock()
2603 void __kmp_destroy_nested_drdpa_lock(kmp_drdpa_lock_t *lck) { in __kmp_destroy_nested_drdpa_lock()
2608 static void __kmp_destroy_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_destroy_nested_drdpa_lock_with_checks()
2624 static const ident_t *__kmp_get_drdpa_lock_location(kmp_drdpa_lock_t *lck) { in __kmp_get_drdpa_lock_location()
2628 static void __kmp_set_drdpa_lock_location(kmp_drdpa_lock_t *lck, in __kmp_set_drdpa_lock_location()
2633 static kmp_lock_flags_t __kmp_get_drdpa_lock_flags(kmp_drdpa_lock_t *lck) { in __kmp_get_drdpa_lock_flags()
2637 static void __kmp_set_drdpa_lock_flags(kmp_drdpa_lock_t *lck, in __kmp_set_drdpa_lock_flags()
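The __kmp_*_drdpa_lock entries are a ticket lock with a distributed polling area: every waiter spins on its own slot of an array, so a release only writes the next waiter's cache line, and the runtime can reallocate and migrate that polling area as the thread count changes. The fixed-size sketch below (illustrative names, no reconfiguration) shows the core hand-off; it is only correct while the number of simultaneous waiters does not exceed the slot count, which is exactly the limitation the real lock's resizing removes.

// Array-based polling lock sketch in the spirit of the drdpa lock.
#include <atomic>
#include <cstdint>

constexpr unsigned LOG_SLOTS = 6;
constexpr std::uint64_t MASK = (1u << LOG_SLOTS) - 1;   // 64 slots

struct polling_lock {
  alignas(64) std::atomic<std::uint64_t> polls[1u << LOG_SLOTS];
  std::atomic<std::uint64_t> next_ticket{0};
  std::uint64_t now_serving = 0;          // written only by the current owner
  polling_lock() {
    for (auto &p : polls) p.store(0, std::memory_order_relaxed);
  }
};

void polling_acquire(polling_lock *lck) {
  std::uint64_t ticket =
      lck->next_ticket.fetch_add(1, std::memory_order_relaxed);
  // Wait until our own slot is stamped with our ticket number. Ticket 0 is
  // satisfied immediately because all slots start at 0.
  while (lck->polls[ticket & MASK].load(std::memory_order_acquire) != ticket) {
    // spin; the runtime also yields and may resize the polling area here
  }
  lck->now_serving = ticket;
}

void polling_release(polling_lock *lck) {
  std::uint64_t next = lck->now_serving + 1;
  // Hand off by stamping the next waiter's slot with its ticket number.
  lck->polls[next & MASK].store(next, std::memory_order_release);
}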
2690 static void __kmp_init_direct_lock(kmp_dyna_lock_t *lck, in __kmp_init_direct_lock()
2709 static void __kmp_destroy_hle_lock(kmp_dyna_lock_t *lck) { TCW_4(*lck, 0); } in __kmp_destroy_hle_lock()
2711 static void __kmp_destroy_hle_lock_with_checks(kmp_dyna_lock_t *lck) { in __kmp_destroy_hle_lock_with_checks()
2715 static void __kmp_acquire_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_acquire_hle_lock()
2729 static void __kmp_acquire_hle_lock_with_checks(kmp_dyna_lock_t *lck, in __kmp_acquire_hle_lock_with_checks()
2734 static int __kmp_release_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_release_hle_lock()
2742 static int __kmp_release_hle_lock_with_checks(kmp_dyna_lock_t *lck, in __kmp_release_hle_lock_with_checks()
2747 static int __kmp_test_hle_lock(kmp_dyna_lock_t *lck, kmp_int32 gtid) { in __kmp_test_hle_lock()
2751 static int __kmp_test_hle_lock_with_checks(kmp_dyna_lock_t *lck, in __kmp_test_hle_lock_with_checks()
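The __kmp_*_hle_lock entries use hardware lock elision: the lock word is swapped with an XACQUIRE hint and cleared with an XRELEASE hint, so the CPU can run non-conflicting critical sections concurrently and transparently fall back to real locking on conflict. A sketch of that pattern follows, assuming GCC's __ATOMIC_HLE_ACQUIRE/__ATOMIC_HLE_RELEASE flags on x86 built with -mhle; names are illustrative.

// Hardware-lock-elision sketch over a plain spin word.
#include <cstdint>
#include <immintrin.h>

using hle_lock = std::uint32_t;             // 0 = free, 1 = held (or elided)

void hle_acquire(hle_lock *lck) {
  // XACQUIRE-prefixed exchange: starts elision; on abort it re-executes as a
  // normal atomic exchange.
  while (__atomic_exchange_n(lck, 1u,
                             __ATOMIC_ACQUIRE | __ATOMIC_HLE_ACQUIRE)) {
    // Lock appears held: wait politely until it looks free, then retry.
    while (__atomic_load_n(lck, __ATOMIC_RELAXED))
      _mm_pause();
  }
}

void hle_release(hle_lock *lck) {
  // XRELEASE-prefixed store ends the elided region (or really unlocks).
  __atomic_store_n(lck, 0u, __ATOMIC_RELEASE | __ATOMIC_HLE_RELEASE);
}

int hle_test(hle_lock *lck) {               // non-blocking attempt
  return !__atomic_exchange_n(lck, 1u,
                              __ATOMIC_ACQUIRE | __ATOMIC_HLE_ACQUIRE);
}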
2756 static void __kmp_init_rtm_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_init_rtm_queuing_lock()
2760 static void __kmp_destroy_rtm_queuing_lock(kmp_queuing_lock_t *lck) { in __kmp_destroy_rtm_queuing_lock()
2765 __kmp_destroy_rtm_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_destroy_rtm_queuing_lock_with_checks()
2770 static void __kmp_acquire_rtm_queuing_lock(kmp_queuing_lock_t *lck, in __kmp_acquire_rtm_queuing_lock()
2793 static void __kmp_acquire_rtm_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_acquire_rtm_queuing_lock_with_checks()
2799 static int __kmp_release_rtm_queuing_lock(kmp_queuing_lock_t *lck, in __kmp_release_rtm_queuing_lock()
2811 static int __kmp_release_rtm_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_release_rtm_queuing_lock_with_checks()
2817 static int __kmp_test_rtm_queuing_lock(kmp_queuing_lock_t *lck, in __kmp_test_rtm_queuing_lock()
2832 static int __kmp_test_rtm_queuing_lock_with_checks(kmp_queuing_lock_t *lck, in __kmp_test_rtm_queuing_lock_with_checks()
2840 static void __kmp_destroy_rtm_spin_lock(kmp_rtm_spin_lock_t *lck) { in __kmp_destroy_rtm_spin_lock()
2844 static void __kmp_destroy_rtm_spin_lock_with_checks(kmp_rtm_spin_lock_t *lck) { in __kmp_destroy_rtm_spin_lock_with_checks()
2849 static int __kmp_acquire_rtm_spin_lock(kmp_rtm_spin_lock_t *lck, in __kmp_acquire_rtm_spin_lock()
2881 static int __kmp_acquire_rtm_spin_lock_with_checks(kmp_rtm_spin_lock_t *lck, in __kmp_acquire_rtm_spin_lock_with_checks()
2887 static int __kmp_release_rtm_spin_lock(kmp_rtm_spin_lock_t *lck, in __kmp_release_rtm_spin_lock()
2900 static int __kmp_release_rtm_spin_lock_with_checks(kmp_rtm_spin_lock_t *lck, in __kmp_release_rtm_spin_lock_with_checks()
2906 static int __kmp_test_rtm_spin_lock(kmp_rtm_spin_lock_t *lck, kmp_int32 gtid) { in __kmp_test_rtm_spin_lock()
2928 static int __kmp_test_rtm_spin_lock_with_checks(kmp_rtm_spin_lock_t *lck, in __kmp_test_rtm_spin_lock_with_checks()
3123 kmp_indirect_lock_t *lck; in __kmp_allocate_indirect_lock() local
3196 kmp_indirect_lock_t *lck = NULL; in __kmp_lookup_indirect_lock() local
3299 kmp_int32 __kmp_get_user_lock_owner(kmp_user_lock_p lck, kmp_uint32 seq) { in __kmp_get_user_lock_owner()
3475 static void __kmp_init_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_init_tas_lock_with_checks()
3479 static void __kmp_init_nested_tas_lock_with_checks(kmp_tas_lock_t *lck) { in __kmp_init_nested_tas_lock_with_checks()
3484 static void __kmp_init_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_init_futex_lock_with_checks()
3488 static void __kmp_init_nested_futex_lock_with_checks(kmp_futex_lock_t *lck) { in __kmp_init_nested_futex_lock_with_checks()
3493 static int __kmp_is_ticket_lock_initialized(kmp_ticket_lock_t *lck) { in __kmp_is_ticket_lock_initialized()
3497 static void __kmp_init_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_init_ticket_lock_with_checks()
3501 static void __kmp_init_nested_ticket_lock_with_checks(kmp_ticket_lock_t *lck) { in __kmp_init_nested_ticket_lock_with_checks()
3505 static int __kmp_is_queuing_lock_initialized(kmp_queuing_lock_t *lck) { in __kmp_is_queuing_lock_initialized()
3509 static void __kmp_init_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_init_queuing_lock_with_checks()
3514 __kmp_init_nested_queuing_lock_with_checks(kmp_queuing_lock_t *lck) { in __kmp_init_nested_queuing_lock_with_checks()
3519 static void __kmp_init_adaptive_lock_with_checks(kmp_adaptive_lock_t *lck) { in __kmp_init_adaptive_lock_with_checks()
3524 static int __kmp_is_drdpa_lock_initialized(kmp_drdpa_lock_t *lck) { in __kmp_is_drdpa_lock_initialized()
3528 static void __kmp_init_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_init_drdpa_lock_with_checks()
3532 static void __kmp_init_nested_drdpa_lock_with_checks(kmp_drdpa_lock_t *lck) { in __kmp_init_nested_drdpa_lock_with_checks()
3795 static kmp_lock_index_t __kmp_lock_table_insert(kmp_user_lock_p lck) { in __kmp_lock_table_insert()
3856 kmp_user_lock_p lck; in __kmp_user_lock_allocate() local
3899 kmp_user_lock_p lck) { in __kmp_user_lock_free()
3917 kmp_user_lock_p lck = NULL; in __kmp_lookup_user_lock() local
3953 #define IS_CRITICAL(lck) \ in __kmp_cleanup_user_locks() argument
3985 kmp_user_lock_p lck = in __kmp_cleanup_user_locks() local
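The final group (__kmp_lock_table_insert, __kmp_user_lock_allocate, __kmp_user_lock_free, __kmp_lookup_user_lock, __kmp_cleanup_user_locks) manages the user-lock table: heap-allocated locks are stored in a growable table and handed out as small integer indices, with freed indices recycled. The sketch below shows only that table idea with illustrative names; it is left unsynchronized, whereas the runtime serializes these paths, and it omits the critical-section and location bookkeeping seen in the IS_CRITICAL cleanup code.

// Sketch of a user-lock table: index <-> lock-pointer mapping with reuse.
#include <vector>
#include <cstddef>

struct user_lock;                           // opaque lock object

struct lock_table {
  std::vector<user_lock *> slots{nullptr};  // index 0 is reserved / invalid
  std::vector<std::size_t> free_list;       // indices available for reuse

  std::size_t insert(user_lock *lck) {      // cf. __kmp_lock_table_insert
    if (!free_list.empty()) {
      std::size_t idx = free_list.back();
      free_list.pop_back();
      slots[idx] = lck;
      return idx;
    }
    slots.push_back(lck);                   // grow the table
    return slots.size() - 1;
  }

  user_lock *lookup(std::size_t idx) const {  // cf. __kmp_lookup_user_lock
    return (idx > 0 && idx < slots.size()) ? slots[idx] : nullptr;
  }

  void release(std::size_t idx) {           // cf. __kmp_user_lock_free
    slots[idx] = nullptr;
    free_list.push_back(idx);
  }
};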