Lines Matching defs:kmp_int64
140 typedef __int64 kmp_int64;
148 typedef struct kmp_struct64 kmp_int64;
173 typedef long long kmp_int64;
211 typedef kmp_int64 kmp_int;
497 InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))
516 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
547 __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
548 (kmp_int64)(sv))
550 __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
551 (kmp_int64)(sv))
590 inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
591 kmp_int64 cv, kmp_int64 sv) {
595 inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
596 kmp_int64 cv, kmp_int64 sv) {
613 _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv), \
614 (kmp_int64)(cv))
623 kmp_int64 tmp = _InterlockedExchange64((volatile kmp_int64 *)p, *(kmp_int64
638 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
639 kmp_int64 sv);
646 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
647 kmp_int64 cv, kmp_int64 sv);
652 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
688 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
689 (kmp_int64)(sv))
691 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
692 (kmp_int64)(sv))
700 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
701 (kmp_int64)(sv))
709 __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
710 (kmp_int64)(sv))
730 __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
732 __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
735 __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
737 __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
745 __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
747 __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
749 __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
751 __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
754 __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
756 __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
758 __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
760 __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
776 __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
898 kmp_int64 tmp = __sync_lock_test_and_set(up, uv);
912 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
922 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
923 kmp_int64 sv);
930 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
931 kmp_int64 cv, kmp_int64 sv);
936 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
945 __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
947 __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
953 __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
955 __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
961 __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
963 __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
969 __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))
1003 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1004 (kmp_int64)(sv))
1006 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1007 (kmp_int64)(sv))
1015 __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1016 (kmp_int64)(sv))
1027 __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
1028 (kmp_int64)(sv))
1127 // #define TCR_8(a) (*(volatile kmp_int64 *)(a))
1128 // #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
1151 KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
1152 (kmp_int64)(b), (kmp_int64)(c))
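
The entries above show one small family of 64-bit atomics routed to different back ends: MSVC _Interlocked* intrinsics (e.g. lines 497, 613-614, 623), GCC/Clang __atomic_* and __sync_* builtins (lines 730-776), or extern __kmp_* helpers such as __kmp_test_then_add64 and __kmp_compare_and_store64 (lines 516, 638-652, 912-936). The sketch below is a minimal, self-contained illustration of that layering, not the upstream kmp_os.h code; the *_DEMO names, kmp_int64_demo, counter_demo, and the main() driver are hypothetical, and only the two back ends visible in the listing are covered.

#include <stdint.h>
#include <stdio.h>

typedef int64_t kmp_int64_demo; /* stand-in for kmp_int64 */

#if defined(_MSC_VER) && !defined(__GNUC__)
#include <intrin.h>
/* _InterlockedCompareExchange64 returns the previous contents, so compare
   against cv to synthesize the success flag the macro is meant to yield. */
#define KMP_COMPARE_AND_STORE_REL64_DEMO(p, cv, sv)                           \
  (_InterlockedCompareExchange64((volatile __int64 *)(p), (__int64)(sv),      \
                                 (__int64)(cv)) == (__int64)(cv))
/* Returns the value *p held before the addition ("test then add"). */
#define KMP_TEST_THEN_ADD64_DEMO(p, v)                                        \
  _InterlockedExchangeAdd64((volatile __int64 *)(p), (__int64)(v))
#else
/* GCC/Clang path: __sync_bool_compare_and_swap already yields a boolean and
   __sync_fetch_and_add returns the pre-addition value. */
#define KMP_COMPARE_AND_STORE_REL64_DEMO(p, cv, sv)                           \
  __sync_bool_compare_and_swap((volatile kmp_int64_demo *)(p),                \
                               (kmp_int64_demo)(cv), (kmp_int64_demo)(sv))
#define KMP_TEST_THEN_ADD64_DEMO(p, v)                                        \
  __sync_fetch_and_add((volatile kmp_int64_demo *)(p), (kmp_int64_demo)(v))
#endif

/* Hypothetical usage: bump a shared 64-bit counter by 4 (as the ADD4-style
   wrappers in the listing do), then try to swing it back to zero with a
   compare-and-store that only succeeds if no other thread moved it. */
static volatile kmp_int64_demo counter_demo = 0;

int main(void) {
  kmp_int64_demo old = KMP_TEST_THEN_ADD64_DEMO(&counter_demo, 4LL);
  int reset = (int)KMP_COMPARE_AND_STORE_REL64_DEMO(&counter_demo,
                                                    old + 4LL, 0LL);
  printf("old=%lld reset=%d now=%lld\n", (long long)old, reset,
         (long long)counter_demo);
  return 0;
}

One design point the sketch mirrors from the listing: both __sync_fetch_and_add and _InterlockedExchangeAdd64 hand back the value held before the addition, so a "test then add" wrapper can pass the return value straight through; for compare-and-store, the MSVC side has to compare the intrinsic's returned previous value against cv to produce a boolean, whereas __sync_bool_compare_and_swap yields one directly.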