Lines matching defs:thr (occurrences of the ThreadState* thr parameter). The number prefixing each fragment is its line number in the source file, which appears to be the TSan runtime's memory-access path, compiler-rt's tsan_rtl_access.cpp.

18 ALWAYS_INLINE USED bool TryTraceMemoryAccess(ThreadState* thr, uptr pc,
25 if (UNLIKELY(!TraceAcquire(thr, &ev)))
28 uptr pc_delta = pc - thr->trace_prev_pc + (1 << (EventAccess::kPCBits - 1));
29 thr->trace_prev_pc = pc;
38 TraceRelease(thr, ev);
52 TraceRelease(thr, evex);
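
The formula at source line 28 compresses the program counter as a biased delta: shifting by 2^(kPCBits-1) lets one unsigned field hold small jumps in either direction, and line 29 updates the base for the next event. A minimal sketch of that encoding, assuming kPCBits = 15 (the real EventAccess::kPCBits may differ):

    #include <cassert>
    #include <cstdint>

    // Assumed field width; the runtime's EventAccess::kPCBits may differ.
    constexpr unsigned kPCBits = 15;

    // Bias the delta by 2^(kPCBits-1) so small negative and positive jumps
    // both land in an unsigned kPCBits-wide field (cf. source line 28).
    bool EncodePCDelta(uint64_t pc, uint64_t prev_pc, uint64_t* out) {
      uint64_t delta = pc - prev_pc + (1ull << (kPCBits - 1));
      if (delta >= (1ull << kPCBits))
        return false;  // Too far: caller emits a full (uncompressed) event.
      *out = delta;
      return true;
    }

    uint64_t DecodePC(uint64_t delta, uint64_t prev_pc) {
      return prev_pc + delta - (1ull << (kPCBits - 1));
    }

    int main() {
      uint64_t d = 0;
      assert(EncodePCDelta(0x0ff8, 0x1000, &d));  // Small backward jump.
      assert(DecodePC(d, 0x1000) == 0x0ff8);
    }

The range variant at lines 57-74 instead stores the absolute pc (line 64), giving the next compact event a fresh delta base.
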
57 bool TryTraceMemoryAccessRange(ThreadState* thr, uptr pc, uptr addr, uptr size,
62 if (UNLIKELY(!TraceAcquire(thr, &ev)))
64 thr->trace_prev_pc = pc;
74 TraceRelease(thr, ev);
78 void TraceMemoryAccessRange(ThreadState* thr, uptr pc, uptr addr, uptr size,
80 if (LIKELY(TryTraceMemoryAccessRange(thr, pc, addr, size, typ)))
82 TraceSwitchPart(thr);
83 UNUSED bool res = TryTraceMemoryAccessRange(thr, pc, addr, size, typ);
87 void TraceFunc(ThreadState* thr, uptr pc) {
88 if (LIKELY(TryTraceFunc(thr, pc)))
90 TraceSwitchPart(thr);
91 UNUSED bool res = TryTraceFunc(thr, pc);
95 NOINLINE void TraceRestartFuncEntry(ThreadState* thr, uptr pc) {
96 TraceSwitchPart(thr);
97 FuncEntry(thr, pc);
100 NOINLINE void TraceRestartFuncExit(ThreadState* thr) {
101 TraceSwitchPart(thr);
102 FuncExit(thr);
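
Lines 78-102 all follow one fast/slow split: an inlined attempt to append to the current trace part, and an out-of-line (NOINLINE) restart that calls TraceSwitchPart and retries, keeping the hot path small. A self-contained sketch of the pattern with toy buffer types (the real trace is a list of parts with an atomic cursor):

    #include <cstdio>

    struct Event { unsigned long payload; };

    // Toy trace part: a fixed buffer plus a write cursor; an assumption
    // made purely to keep the sketch self-contained.
    struct ThreadState {
      Event buf[4];
      unsigned pos = 0;
    };

    bool TryTraceEvent(ThreadState* thr, Event ev) {
      if (thr->pos == 4)
        return false;             // Current part is full.
      thr->buf[thr->pos++] = ev;  // Fast path: bump-pointer append.
      return true;
    }

    // Deliberately out of line in the real runtime (NOINLINE) so the
    // fast path stays small enough to inline at every call site.
    void TraceSwitchPart(ThreadState* thr) {
      std::printf("switching to a new trace part\n");
      thr->pos = 0;  // Real code allocates/recycles a new part here.
    }

    void TraceEvent(ThreadState* thr, Event ev) {
      if (TryTraceEvent(thr, ev))
        return;
      TraceSwitchPart(thr);
      bool res = TryTraceEvent(thr, ev);  // Cannot fail on a fresh part.
      (void)res;
    }

    int main() {
      ThreadState thr;
      for (int i = 0; i < 10; i++) TraceEvent(&thr, Event{(unsigned long)i});
    }

Marking the retry's result UNUSED (lines 83 and 91) documents the same invariant: the second attempt cannot fail on a freshly switched part.
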
105 void TraceMutexLock(ThreadState* thr, EventType type, uptr pc, uptr addr,
119 TraceEvent(thr, ev);
122 void TraceMutexUnlock(ThreadState* thr, uptr addr) {
131 TraceEvent(thr, ev);
134 void TraceTime(ThreadState* thr) {
137 FastState fast_state = thr->fast_state;
145 TraceEvent(thr, ev);
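
The mutex and time fragments (lines 105-145) append richer, typed events through TraceEvent rather than the compact access encoding. One plausible packed layout is sketched below; the field names and widths are assumptions, not the runtime's actual EventLock/EventTime encoding:

    #include <cassert>
    #include <cstdint>

    // Assumed packed layout; widths chosen only so everything fits in
    // one 64-bit word like the runtime's events do.
    struct EventPacked {
      uint64_t is_access : 1;  // 0 for the "extended" events sketched here.
      uint64_t type : 5;       // EventType: lock, unlock, time, ...
      uint64_t payload : 58;   // pc / address / epoch, depending on type.
    };
    static_assert(sizeof(EventPacked) == 8, "events stay one word wide");

    enum EventType : uint8_t { kLock = 1, kUnlock = 2, kTime = 3 };

    EventPacked MakeTimeEvent(uint64_t epoch) {
      return EventPacked{0, kTime, epoch};
    }

    int main() {
      EventPacked ev = MakeTimeEvent(42);
      assert(ev.type == kTime && ev.payload == 42);
    }
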
148 NOINLINE void DoReportRace(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
165 SlotUnlock(thr);
166 ReportRace(thr, shadow_mem, cur, Shadow(old), typ);
168 SlotLock(thr);
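
DoReportRace releases the slot lock around the heavyweight ReportRace call (lines 165-168) because report construction acquires other locks; the invariant is restored before returning to the caller. The same unlock-around-callback shape, sketched with a plain std::mutex:

    #include <cstdio>
    #include <mutex>

    std::mutex slot_mu;  // Stands in for the runtime's shadow slot lock.

    void ReportRaceSketch() { std::printf("building report...\n"); }

    void DoReportRaceSketch() {
      // Precondition: caller holds slot_mu (as the runtime holds the slot).
      slot_mu.unlock();
      ReportRaceSketch();  // May block or take unrelated locks safely now.
      slot_mu.lock();      // Restore the caller's locking invariant.
    }

    int main() {
      slot_mu.lock();
      DoReportRaceSketch();
      slot_mu.unlock();
    }
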
195 bool CheckRaces(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
218 if (LIKELY(thr->clock.Get(old.sid()) >= old.epoch()))
220 DoReportRace(thr, shadow_mem, cur, old, typ);
229 atomic_load_relaxed(&thr->trace_pos) / sizeof(Event) % kShadowCnt;
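
The scalar race check at line 218 is a vector-clock query: the old access (sid, epoch) is ordered before the current one iff the current thread's clock for that slot has reached the epoch; otherwise line 220 reports a race. Line 229 then derives a pseudo-random eviction index from the trace position. A minimal sketch of the clock test, with an assumed slot count:

    #include <array>
    #include <cassert>
    #include <cstdint>

    constexpr int kSlots = 4;  // Assumed; the runtime uses more slots.

    struct VectorClock {
      std::array<uint16_t, kSlots> epochs{};
      uint16_t Get(int sid) const { return epochs[sid]; }
      void Set(int sid, uint16_t e) { epochs[sid] = e; }
    };

    // The core test from source line 218: the old access (sid, epoch) is
    // happens-before the current one iff the current thread has already
    // acquired that slot up to at least that epoch.
    bool HappensBefore(const VectorClock& cur, int old_sid, uint16_t old_epoch) {
      return cur.Get(old_sid) >= old_epoch;
    }

    int main() {
      VectorClock clock;
      clock.Set(1, 10);
      assert(HappensBefore(clock, 1, 7));    // Acquired through epoch 10.
      assert(!HappensBefore(clock, 1, 11));  // Epoch 11 unseen: a race.
    }
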
274 NOINLINE void DoReportRaceV(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
301 DoReportRace(thr, shadow_mem, cur, prev, typ);
305 bool CheckRaces(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
349 index = (atomic_load_relaxed(&thr->trace_pos) / 2) % 16;
365 u16 epoch = static_cast<u16>(thr->clock.Get(static_cast<Sid>(sid))); \
380 DoReportRaceV(thr, shadow_mem, cur, concurrent_mask, shadow, typ);
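
The second CheckRaces (line 305 onward) is the vectorized variant: packed compares build a mask of shadow slots whose epoch the current clock has not reached, and DoReportRaceV reports against a set bit of that mask. A scalar rendition of the mask loop (the field widths and clock table are assumptions):

    #include <cassert>
    #include <cstdint>

    constexpr int kShadowCnt = 4;

    // Scalar equivalent of the SSE loop: collect a bitmask of shadow slots
    // whose (sid, epoch) is not covered by the current thread's clock,
    // then return the index of the first racing slot, or -1 if ordered.
    int FirstConcurrentSlot(const uint16_t shadow_epoch[kShadowCnt],
                            const uint8_t shadow_sid[kShadowCnt],
                            const uint16_t* clock) {
      unsigned concurrent_mask = 0;
      for (int i = 0; i < kShadowCnt; i++) {
        uint16_t epoch = clock[shadow_sid[i]];  // Cf. source line 365.
        if (epoch < shadow_epoch[i])
          concurrent_mask |= 1u << i;
      }
      if (!concurrent_mask)
        return -1;                             // All old accesses ordered.
      return __builtin_ctz(concurrent_mask);   // First racing slot.
    }

    int main() {
      uint16_t epochs[kShadowCnt] = {3, 9, 2, 5};
      uint8_t sids[kShadowCnt] = {0, 1, 0, 1};
      uint16_t clock[2] = {4, 6};  // Covers epoch 4 on sid 0, 6 on sid 1.
      assert(FirstConcurrentSlot(epochs, sids, clock) == 1);  // 9 > 6 races.
    }
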
414 NOINLINE void TraceRestartMemoryAccess(ThreadState* thr, uptr pc, uptr addr,
416 TraceSwitchPart(thr);
417 MemoryAccess(thr, pc, addr, size, typ);
420 ALWAYS_INLINE USED void MemoryAccess(ThreadState* thr, uptr pc, uptr addr,
424 DPrintf2("#%d: Access: %d@%d %p/%zd typ=0x%x {%s, %s, %s, %s}\n", thr->tid,
425 static_cast<int>(thr->fast_state.sid()),
426 static_cast<int>(thr->fast_state.epoch()), (void*)addr, size,
432 FastState fast_state = thr->fast_state;
440 if (!TryTraceMemoryAccess(thr, pc, addr, size, typ))
441 return TraceRestartMemoryAccess(thr, pc, addr, size, typ);
442 CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
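
MemoryAccess (lines 420-442) is the 1-8 byte fast path: bail out if the shadow already holds an equal access, append a trace event (restarting the whole access via TraceRestartMemoryAccess when the part is full, lines 414-417), then run the race check. Its shape, with every type reduced to a toy stand-in:

    #include <cstdint>
    #include <cstdio>

    // All types and constants here are assumptions made for self-containment.
    struct ThreadState { unsigned trace_pos = 0, trace_cap = 4; };

    static bool TryTrace(ThreadState* thr) {
      return thr->trace_pos == thr->trace_cap ? false : (++thr->trace_pos, true);
    }
    static void TraceSwitchPart(ThreadState* thr) { thr->trace_pos = 0; }
    static bool CheckRacesSketch(uint64_t* shadow_mem, uint64_t cur) {
      bool race = *shadow_mem && *shadow_mem != cur;  // Crude stand-in.
      *shadow_mem = cur;                              // Store current access.
      return race;
    }

    void MemoryAccessSketch(ThreadState* thr, uint64_t* shadow_mem, uint64_t cur) {
      if (*shadow_mem == cur)
        return;                 // Hot case: equal access already recorded.
      if (!TryTrace(thr)) {
        TraceSwitchPart(thr);   // The NOINLINE restart path in the original.
        return MemoryAccessSketch(thr, shadow_mem, cur);
      }
      if (CheckRacesSketch(shadow_mem, cur))
        std::printf("race reported\n");
    }

    int main() {
      ThreadState thr;
      uint64_t shadow = 0;
      MemoryAccessSketch(&thr, &shadow, 1);
      MemoryAccessSketch(&thr, &shadow, 2);  // Differs: toy "race".
    }
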
445 void MemoryAccess16(ThreadState* thr, uptr pc, uptr addr, AccessType typ);
448 void RestartMemoryAccess16(ThreadState* thr, uptr pc, uptr addr,
450 TraceSwitchPart(thr);
451 MemoryAccess16(thr, pc, addr, typ);
454 ALWAYS_INLINE USED void MemoryAccess16(ThreadState* thr, uptr pc, uptr addr,
457 FastState fast_state = thr->fast_state;
467 if (!TryTraceMemoryAccessRange(thr, pc, addr, size, typ))
468 return RestartMemoryAccess16(thr, pc, addr, typ);
470 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, shadow, access, typ)))
478 if (!traced && !TryTraceMemoryAccessRange(thr, pc, addr, size, typ))
479 return RestartMemoryAccess16(thr, pc, addr, typ);
480 CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
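
MemoryAccess16 checks the two 8-byte halves separately but must trace the range at most once; the traced flag at lines 478-479 skips the second TryTraceMemoryAccessRange when the first half already appended the event. A sketch of that trace-once walk (race handling reduced to a boolean stand-in):

    #include <cstdint>

    struct ThreadState { bool trace_full = false; };  // Toy assumption.
    static bool TryTraceRange(ThreadState* thr) { return !thr->trace_full; }
    static bool CheckCell(uint64_t* cell, uint64_t cur) {
      bool same = (*cell == cur);  // Stand-in for the same-access shortcut.
      *cell = cur;
      return same;
    }

    // Returns false when the caller must TraceSwitchPart and restart,
    // mirroring the RestartMemoryAccess16 returns at lines 468 and 479.
    bool Access16Sketch(ThreadState* thr, uint64_t shadow[2], uint64_t cur) {
      bool traced = false;
      if (!CheckCell(&shadow[0], cur)) {
        if (!TryTraceRange(thr))
          return false;
        traced = true;           // Range event now recorded.
      }
      if (!CheckCell(&shadow[1], cur)) {
        if (!traced && !TryTraceRange(thr))
          return false;          // Trace only if the first half didn't.
      }
      return true;
    }

    int main() {
      ThreadState thr;
      uint64_t shadow[2] = {0, 0};
      Access16Sketch(&thr, shadow, 7);
    }
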
484 void RestartUnalignedMemoryAccess(ThreadState* thr, uptr pc, uptr addr,
486 TraceSwitchPart(thr);
487 UnalignedMemoryAccess(thr, pc, addr, size, typ);
490 ALWAYS_INLINE USED void UnalignedMemoryAccess(ThreadState* thr, uptr pc,
494 FastState fast_state = thr->fast_state;
505 if (!TryTraceMemoryAccessRange(thr, pc, addr, size, typ))
506 return RestartUnalignedMemoryAccess(thr, pc, addr, size, typ);
508 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, shadow, access, typ)))
520 if (!traced && !TryTraceMemoryAccessRange(thr, pc, addr, size, typ))
521 return RestartUnalignedMemoryAccess(thr, pc, addr, size, typ);
522 CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
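
UnalignedMemoryAccess handles an access that can straddle an 8-byte shadow cell, checking each part against its own cell and reusing the same traced flag (lines 520-521). A sketch of the presumed split computation, assuming kShadowCell = 8:

    #include <cassert>
    #include <cstdint>

    constexpr uintptr_t kShadowCell = 8;  // Bytes per shadow cell (assumed).

    // Split an unaligned access into the part that fits in the first cell
    // and the remainder that spills into the next one.
    void SplitUnaligned(uintptr_t addr, uintptr_t size, uintptr_t* size1,
                        uintptr_t* size2) {
      uintptr_t cell_end = (addr & ~(kShadowCell - 1)) + kShadowCell;
      *size1 = size < cell_end - addr ? size : cell_end - addr;
      *size2 = size - *size1;  // Zero when the access fits in one cell.
    }

    int main() {
      uintptr_t s1, s2;
      SplitUnaligned(14, 4, &s1, &s2);  // Bytes 14..17 cross the boundary.
      assert(s1 == 2 && s2 == 2);
      SplitUnaligned(16, 4, &s1, &s2);  // Aligned enough: one cell only.
      assert(s1 == 4 && s2 == 0);
    }
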
583 void MemoryResetRange(ThreadState* thr, uptr pc, uptr addr, uptr size) {
589 void MemoryRangeFreed(ThreadState* thr, uptr pc, uptr addr, uptr size) {
598 DCHECK(thr->slot_locked);
607 TraceMemoryAccessRange(thr, pc, addr, size, typ);
609 Shadow cur(thr->fast_state, 0, kShadowCell, typ);
617 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, shadow, access, typ)))
623 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, 0, 0, typ)))
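
MemoryRangeFreed runs with the slot locked (the DCHECK at line 598), traces the whole range as one free access (line 607), and race-checks every cell; passing empty shadow/access words at line 623 makes any stale epoch conflict. A loose sketch of the idea; the kShadowFreed sentinel and the reset behavior are assumptions:

    #include <cstdint>
    #include <cstdio>

    constexpr int kCells = 4;
    constexpr uint64_t kShadowFreed = ~0ull;  // Assumed poison value.

    // Report any live access still recorded in the range, then poison the
    // shadow so later reads/writes of the freed memory also report.
    void RangeFreedSketch(uint64_t shadow[kCells], uint64_t free_access) {
      for (int i = 0; i < kCells; i++) {
        if (shadow[i] && shadow[i] != free_access)
          std::printf("cell %d: racy access to freed memory\n", i);
        shadow[i] = kShadowFreed;
      }
    }

    int main() {
      uint64_t shadow[kCells] = {0, 42, 0, 42};
      RangeFreedSketch(shadow, 7);
    }
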
633 void MemoryRangeImitateWrite(ThreadState* thr, uptr pc, uptr addr, uptr size) {
636 TraceMemoryAccessRange(thr, pc, addr, size, kAccessWrite);
637 Shadow cur(thr->fast_state, 0, 8, kAccessWrite);
641 void MemoryRangeImitateWriteOrResetRange(ThreadState* thr, uptr pc, uptr addr,
643 if (thr->ignore_reads_and_writes == 0)
644 MemoryRangeImitateWrite(thr, pc, addr, size);
646 MemoryResetRange(thr, pc, addr, size);
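
MemoryRangeImitateWrite publishes a synthetic write over a range so later accesses can race with it, while the OrReset variant falls back to a plain reset when the thread is ignoring accesses (lines 643-646). A toy sketch of that dispatch; StoreShadow semantics are reduced to direct assignment:

    #include <cstdint>

    constexpr int kCellsPerRange = 4;

    // Stamp either a synthetic write shadow or an empty (reset) value over
    // every cell in the range; 0 models an empty cell here.
    void ImitateWriteOrReset(uint64_t* shadow, bool ignoring, uint64_t write_sh) {
      for (int i = 0; i < kCellsPerRange; i++)
        shadow[i] = ignoring ? 0 : write_sh;
    }

    int main() {
      uint64_t shadow[kCellsPerRange] = {};
      ImitateWriteOrReset(shadow, /*ignoring=*/false, /*write_sh=*/0x42);
    }
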
650 bool MemoryAccessRangeOne(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
655 return CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
659 NOINLINE void RestartMemoryAccessRange(ThreadState* thr, uptr pc, uptr addr,
661 TraceSwitchPart(thr);
662 MemoryAccessRangeT<is_read>(thr, pc, addr, size);
666 void MemoryAccessRangeT(ThreadState* thr, uptr pc, uptr addr, uptr size) {
670 DPrintf2("#%d: MemoryAccessRange: @%p %p size=%d is_read=%d\n", thr->tid,
710 FastState fast_state = thr->fast_state;
714 if (!TryTraceMemoryAccessRange(thr, pc, addr, size, typ))
715 return RestartMemoryAccessRange<is_read>(thr, pc, addr, size);
722 if (UNLIKELY(MemoryAccessRangeOne(thr, shadow_mem, cur, typ)))
729 if (UNLIKELY(MemoryAccessRangeOne(thr, shadow_mem, cur, typ)))
735 if (UNLIKELY(MemoryAccessRangeOne(thr, shadow_mem, cur, typ)))
740 template void MemoryAccessRangeT<true>(ThreadState* thr, uptr pc, uptr addr,
742 template void MemoryAccessRangeT<false>(ThreadState* thr, uptr pc, uptr addr,
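
MemoryAccessRangeT walks a range in three phases, visible as the three MemoryAccessRangeOne call sites at lines 722, 729, and 735: an unaligned head up to the first cell boundary, whole 8-byte cells, then a short tail; a race at any step (the UNLIKELY branches) stops the walk. A runnable sketch of that decomposition, with the per-cell check reduced to a stub:

    #include <cstdint>
    #include <cstdio>

    constexpr uintptr_t kShadowCell = 8;  // Bytes per shadow cell (assumed).

    // Stand-in for MemoryAccessRangeOne; returning true would mean a race
    // was found and the walk should stop.
    bool AccessOne(uintptr_t addr, uintptr_t size) {
      std::printf("cell @%zu size=%zu\n", (size_t)addr, (size_t)size);
      return false;
    }

    void RangeSketch(uintptr_t addr, uintptr_t size) {
      uintptr_t end = addr + size;
      if (addr % kShadowCell) {  // Unaligned head (cf. line 722).
        uintptr_t head = kShadowCell - addr % kShadowCell;
        if (head > size) head = size;
        if (AccessOne(addr, head)) return;
        addr += head;
      }
      for (; addr + kShadowCell <= end; addr += kShadowCell)  // Body (729).
        if (AccessOne(addr, kShadowCell)) return;
      if (addr < end && AccessOne(addr, end - addr))  // Tail (735).
        return;
    }

    int main() { RangeSketch(13, 30); }

The explicit instantiations at lines 740-742 then provide the read and write flavors of the templated walk to the rest of the runtime.
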