Lines matching defs:thr
114 ThreadState *thr; // currently wired thread, or nullptr
143 ThreadState *thr;
242 void set_cur_thread(ThreadState *thr);
252 ThreadState *thr = reinterpret_cast<ThreadState *>(cur_thread_placeholder);
253 if (UNLIKELY(!thr->current))
254 thr->current = thr;
255 return thr->current;
257 inline void set_cur_thread(ThreadState *thr) {
258 reinterpret_cast<ThreadState *>(cur_thread_placeholder)->current = thr;
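The cur_thread()/set_cur_thread() pair above lazily wires a raw thread-local placeholder to itself on first use; fiber support can later repoint current at a different state without touching the TLS layout. A minimal standalone sketch of the same pattern, with ThreadStateStub standing in for the real ThreadState:

    #include <cstdint>

    struct ThreadStateStub {
      ThreadStateStub *current;  // active state for this OS thread, or nullptr
    };

    // Raw zero-initialized TLS storage, reinterpreted rather than constructed,
    // so no C++ constructor has to run before the runtime is initialized.
    alignas(ThreadStateStub) thread_local
        char cur_thread_placeholder[sizeof(ThreadStateStub)];

    inline ThreadStateStub *cur_thread() {
      auto *thr = reinterpret_cast<ThreadStateStub *>(cur_thread_placeholder);
      if (!thr->current)
        thr->current = thr;  // first call on this thread: self-wire
      return thr->current;   // may have been redirected by set_cur_thread
    }

    inline void set_cur_thread(ThreadStateStub *thr) {
      reinterpret_cast<ThreadStateStub *>(cur_thread_placeholder)->current = thr;
    }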
268 ThreadState *thr;
446 bool ShouldReport(ThreadState *thr, ReportType typ);
465 void ObtainCurrentStack(ThreadState *thr, uptr toppc, StackTraceTy *stack,
467 uptr size = thr->shadow_stack_pos - thr->shadow_stack;
473 stack->Init(&thr->shadow_stack[start], size, toppc);
477 #define GET_STACK_TRACE_FATAL(thr, pc) \
479 ObtainCurrentStack(thr, pc, &stack); \
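ObtainCurrentStack does no unwinding: the report trace is a slice of the per-thread shadow stack (newest frames kept when it overflows) topped with toppc. A simplified standalone model, with invented Mini* types in place of ThreadState and StackTraceTy and the overflow cap reduced to its essence:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    using uptr = std::uintptr_t;

    struct MiniStackTrace {
      std::vector<uptr> pcs;
      void Init(const uptr *frames, std::size_t n, uptr toppc) {
        pcs.assign(frames, frames + n);
        if (toppc) pcs.push_back(toppc);  // the faulting pc goes on top
      }
    };

    struct MiniThreadState {
      uptr *shadow_stack = nullptr;      // base of the shadow stack
      uptr *shadow_stack_pos = nullptr;  // one past the newest frame
    };

    void ObtainCurrentStackModel(MiniThreadState *thr, uptr toppc,
                                 MiniStackTrace *stack, std::size_t max_frames) {
      std::size_t size = thr->shadow_stack_pos - thr->shadow_stack;
      std::size_t start = size > max_frames ? size - max_frames : 0;
      stack->Init(thr->shadow_stack + start, size - start, toppc);
    }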
485 void UnmapShadow(ThreadState *thr, uptr addr, uptr size);
492 void ForkBefore(ThreadState *thr, uptr pc);
493 void ForkParentAfter(ThreadState *thr, uptr pc);
494 void ForkChildAfter(ThreadState *thr, uptr pc, bool start_thread);
496 void ReportRace(ThreadState *thr, RawShadow *shadow_mem, Shadow cur, Shadow old,
498 bool OutputReport(ThreadState *thr, const ScopedReport &srep);
514 StackID CurrentStackId(ThreadState *thr, uptr pc);
516 void PrintCurrentStack(ThreadState *thr, uptr pc);
520 void Initialize(ThreadState *thr);
522 int Finalize(ThreadState *thr);
524 void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write);
525 void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write);
527 void MemoryAccess(ThreadState *thr, uptr pc, uptr addr, uptr size,
529 void UnalignedMemoryAccess(ThreadState *thr, uptr pc, uptr addr, uptr size,
533 void MemoryAccessRangeT(ThreadState *thr, uptr pc, uptr addr, uptr size);
536 void MemoryAccessRange(ThreadState *thr, uptr pc, uptr addr, uptr size,
541 MemoryAccessRangeT<false>(thr, pc, addr, size);
543 MemoryAccessRangeT<true>(thr, pc, addr, size);
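The inline wrappers at 541/543 above pick the template instantiation at the call site, so the access kind is a compile-time constant on the hot path. The shape of that dispatch, with a stub ThreadState:

    #include <cstdint>
    using uptr = std::uintptr_t;
    struct ThreadStateStub {};

    template <bool is_write>
    void MemoryAccessRangeT(ThreadStateStub *thr, uptr pc, uptr addr, uptr size) {
      // the real version walks the shadow cells covering [addr, addr + size)
    }

    inline void MemoryAccessRange(ThreadStateStub *thr, uptr pc, uptr addr,
                                  uptr size, bool is_write) {
      if (is_write)
        MemoryAccessRangeT<true>(thr, pc, addr, size);
      else
        MemoryAccessRangeT<false>(thr, pc, addr, size);
    }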
547 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size);
548 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size);
549 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size);
550 void MemoryRangeImitateWriteOrResetRange(ThreadState *thr, uptr pc, uptr addr,
553 void ThreadIgnoreBegin(ThreadState *thr, uptr pc);
554 void ThreadIgnoreEnd(ThreadState *thr);
555 void ThreadIgnoreSyncBegin(ThreadState *thr, uptr pc);
556 void ThreadIgnoreSyncEnd(ThreadState *thr);
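The Begin/End pairs above must nest and balance, which callers usually guarantee with a scope guard. A hypothetical RAII wrapper (ScopedIgnore is not part of tsan_rtl.h), assuming in-tree compilation where the header and namespace __tsan are visible:

    #include "tsan_rtl.h"

    namespace __tsan {

    class ScopedIgnore {
     public:
      ScopedIgnore(ThreadState *thr, uptr pc) : thr_(thr) {
        ThreadIgnoreBegin(thr_, pc);  // stop reporting races in this scope
      }
      ~ScopedIgnore() { ThreadIgnoreEnd(thr_); }

     private:
      ThreadState *thr_;
    };

    }  // namespace __tsan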
558 Tid ThreadCreate(ThreadState *thr, uptr pc, uptr uid, bool detached);
559 void ThreadStart(ThreadState *thr, Tid tid, tid_t os_id,
561 void ThreadFinish(ThreadState *thr);
562 Tid ThreadConsumeTid(ThreadState *thr, uptr pc, uptr uid);
563 void ThreadJoin(ThreadState *thr, uptr pc, Tid tid);
564 void ThreadDetach(ThreadState *thr, uptr pc, Tid tid);
565 void ThreadFinalize(ThreadState *thr);
566 void ThreadSetName(ThreadState *thr, const char *name);
567 int ThreadCount(ThreadState *thr);
568 void ProcessPendingSignalsImpl(ThreadState *thr);
569 void ThreadNotJoined(ThreadState *thr, uptr pc, Tid tid, uptr uid);
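ThreadCreate runs in the parent, ThreadStart in the child's start wrapper, and ThreadConsumeTid resolves the user-visible uid (the pthread_t value) to a runtime Tid exactly once at join/detach time. A rough sketch of how the pthread interceptors drive these entry points (helper names are hypothetical; assumes in-tree context):

    #include "tsan_rtl.h"

    namespace __tsan {

    // After REAL(pthread_create) succeeds in the parent.
    void AfterPthreadCreate(ThreadState *parent, uptr pc, uptr uid) {
      Tid tid = ThreadCreate(parent, pc, uid, /*detached=*/false);
      (void)tid;  // the child calls ThreadStart on itself from its start wrapper
    }

    // Around the pthread_join interception.
    void OnPthreadJoin(ThreadState *thr, uptr pc, uptr uid) {
      Tid tid = ThreadConsumeTid(thr, pc, uid);  // uid -> tid, consumed once
      // ... REAL(pthread_join) runs here ...
      ThreadJoin(thr, pc, tid);  // acquire the finished child's state
    }

    }  // namespace __tsan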
573 void ProcWire(Processor *proc, ThreadState *thr);
574 void ProcUnwire(Processor *proc, ThreadState *thr);
578 void MutexCreate(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
579 void MutexDestroy(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
580 void MutexPreLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
581 void MutexPostLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0,
583 int MutexUnlock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
584 void MutexPreReadLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
585 void MutexPostReadLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
586 void MutexReadUnlock(ThreadState *thr, uptr pc, uptr addr);
587 void MutexReadOrWriteUnlock(ThreadState *thr, uptr pc, uptr addr);
588 void MutexRepair(ThreadState *thr, uptr pc, uptr addr); // call on EOWNERDEAD
589 void MutexInvalidAccess(ThreadState *thr, uptr pc, uptr addr);
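MutexPreLock fires before a potentially blocking acquire (deadlock detection), MutexPostLock after it succeeds (clock acquisition), and MutexUnlock before the release. The shape of a lock interceptor built on these hooks (simplified sketch; a real interceptor calls REAL(pthread_mutex_lock) to avoid re-entering itself):

    #include <pthread.h>
    #include "tsan_rtl.h"

    namespace __tsan {

    int InterceptedMutexLock(ThreadState *thr, uptr pc, pthread_mutex_t *m) {
      MutexPreLock(thr, pc, (uptr)m);     // deadlock detection before blocking
      int res = pthread_mutex_lock(m);    // stands in for REAL(pthread_mutex_lock)
      if (res == 0)
        MutexPostLock(thr, pc, (uptr)m);  // acquire the mutex's sync clock
      // a robust mutex returning EOWNERDEAD additionally needs MutexRepair
      return res;
    }

    int InterceptedMutexUnlock(ThreadState *thr, uptr pc, pthread_mutex_t *m) {
      MutexUnlock(thr, pc, (uptr)m);      // release: publish this thread's clock
      return pthread_mutex_unlock(m);
    }

    }  // namespace __tsan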
591 void Acquire(ThreadState *thr, uptr pc, uptr addr);
598 void AcquireGlobal(ThreadState *thr);
599 void Release(ThreadState *thr, uptr pc, uptr addr);
600 void ReleaseStoreAcquire(ThreadState *thr, uptr pc, uptr addr);
601 void ReleaseStore(ThreadState *thr, uptr pc, uptr addr);
602 void AfterSleep(ThreadState *thr, uptr pc);
603 void IncrementEpoch(ThreadState *thr);
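Acquire and Release implement address-keyed happens-before: Release publishes the calling thread's clock into the sync object at addr, and Acquire joins that clock back into the caller. A minimal sketch of how a post/wait primitive's interceptors would pair them (hypothetical helpers, in-tree context):

    #include "tsan_rtl.h"

    namespace __tsan {

    void OnSemPost(ThreadState *thr, uptr pc, uptr addr) {
      Release(thr, pc, addr);  // make everything before the post visible
    }

    void OnSemWait(ThreadState *thr, uptr pc, uptr addr) {
      Acquire(thr, pc, addr);  // synchronize with the matching Release
    }

    }  // namespace __tsan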
611 void SlotAttachAndLock(ThreadState *thr) SANITIZER_ACQUIRE(thr->slot->mtx);
612 void SlotDetach(ThreadState *thr);
613 void SlotLock(ThreadState *thr) SANITIZER_ACQUIRE(thr->slot->mtx);
614 void SlotUnlock(ThreadState *thr) SANITIZER_RELEASE(thr->slot->mtx);
615 void DoReset(ThreadState *thr, uptr epoch);
618 ThreadState *FiberCreate(ThreadState *thr, uptr pc, unsigned flags);
619 void FiberDestroy(ThreadState *thr, uptr pc, ThreadState *fiber);
620 void FiberSwitch(ThreadState *thr, uptr pc, ThreadState *fiber, unsigned flags);
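Each fiber owns a full ThreadState, and FiberSwitch parks the current state and makes the target current (via set_cur_thread above). A condensed round trip over these entry points (sketch only; the public wrappers are the __tsan_create_fiber family):

    #include "tsan_rtl.h"

    namespace __tsan {

    void FiberRoundTrip(ThreadState *thr, uptr pc) {
      ThreadState *fiber = FiberCreate(thr, pc, /*flags=*/0);
      FiberSwitch(thr, pc, fiber, /*flags=*/0);  // 'fiber' becomes cur_thread()
      // ... run on the fiber, then switch back to 'thr' before destroying ...
      FiberDestroy(thr, pc, fiber);
    }

    }  // namespace __tsan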
631 SlotLocker(ThreadState *thr, bool recursive = false)
632 : thr_(thr), locked_(recursive ? thr->slot_locked : false) {
638 DCHECK(!atomic_load(&thr->in_blocking_func, memory_order_relaxed));
657 SlotUnlocker(ThreadState *thr) : thr_(thr), locked_(thr->slot_locked) {
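SlotLocker takes thr->slot->mtx (skipping the acquire when it is already held and recursive is set), while SlotUnlocker drops it temporarily, e.g. around a call that may block. Typical nesting, with a hypothetical MightBlock() stand-in:

    #include "tsan_rtl.h"

    namespace __tsan {

    void MightBlock();  // hypothetical blocking operation

    void ExampleSlotScopes(ThreadState *thr) {
      SlotLocker locker(thr);        // acquires thr->slot->mtx
      // ... work that requires the slot lock ...
      {
        SlotUnlocker unlocker(thr);  // releases across the blocking region
        MightBlock();
      }                              // re-acquired here
    }

    }  // namespace __tsan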
672 ALWAYS_INLINE void ProcessPendingSignals(ThreadState *thr) {
673 if (UNLIKELY(atomic_load_relaxed(&thr->pending_signals)))
674 ProcessPendingSignalsImpl(thr);
680 void LazyInitialize(ThreadState *thr) {
687 Initialize(thr);
692 void TraceSwitchPart(ThreadState *thr);
693 void TraceSwitchPartImpl(ThreadState *thr);
699 ALWAYS_INLINE WARN_UNUSED_RESULT bool TraceAcquire(ThreadState *thr,
703 DCHECK(thr->shadow_stack);
704 Event *pos = reinterpret_cast<Event *>(atomic_load_relaxed(&thr->trace_pos));
709 { Lock lock(&thr->tctx->trace.mtx); }
710 TracePart *current = thr->tctx->trace.parts.Back();
729 ALWAYS_INLINE void TraceRelease(ThreadState *thr, EventT *evp) {
730 DCHECK_LE(evp + 1, &thr->tctx->trace.parts.Back()->events[TracePart::kSize]);
731 atomic_store_relaxed(&thr->trace_pos, (uptr)(evp + 1));
735 void TraceEvent(ThreadState *thr, EventT ev) {
737 if (!TraceAcquire(thr, &evp)) {
738 TraceSwitchPart(thr);
739 UNUSED bool res = TraceAcquire(thr, &evp);
743 TraceRelease(thr, evp);
746 ALWAYS_INLINE WARN_UNUSED_RESULT bool TryTraceFunc(ThreadState *thr,
751 if (UNLIKELY(!TraceAcquire(thr, &ev)))
756 TraceRelease(thr, ev);
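TraceAcquire/TraceRelease form a claim-fill-publish protocol on a per-thread cursor: acquire hands out the next event slot if the current trace part has room, the caller fills it, and release advances trace_pos; on a full part, TraceEvent falls back to TraceSwitchPart and retries. A standalone model with a plain array in place of TracePart (all types invented):

    #include <atomic>
    #include <cstdint>

    struct EventStub { std::uint64_t raw; };

    struct TraceModel {
      static constexpr int kSize = 1024;     // events per part
      EventStub events[kSize];
      std::atomic<EventStub *> pos{events};  // models thr->trace_pos

      bool TraceAcquire(EventStub **evp) {
        EventStub *p = pos.load(std::memory_order_relaxed);
        if (p == events + kSize)
          return false;  // part full: caller must switch parts
        *evp = p;        // claim the slot; nothing is published yet
        return true;
      }

      void TraceRelease(EventStub *evp) {
        pos.store(evp + 1, std::memory_order_relaxed);  // publish the slot
      }

      void TraceEvent(EventStub ev) {
        EventStub *evp;
        if (!TraceAcquire(&evp))
          return;  // the real code calls TraceSwitchPart and retries here
        *evp = ev;
        TraceRelease(evp);
      }
    };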
761 bool TryTraceMemoryAccess(ThreadState *thr, uptr pc, uptr addr, uptr size,
764 bool TryTraceMemoryAccessRange(ThreadState *thr, uptr pc, uptr addr, uptr size,
766 void TraceMemoryAccessRange(ThreadState *thr, uptr pc, uptr addr, uptr size,
768 void TraceFunc(ThreadState *thr, uptr pc = 0);
769 void TraceMutexLock(ThreadState *thr, EventType type, uptr pc, uptr addr,
771 void TraceMutexUnlock(ThreadState *thr, uptr addr);
772 void TraceTime(ThreadState *thr);
774 void TraceRestartFuncExit(ThreadState *thr);
775 void TraceRestartFuncEntry(ThreadState *thr, uptr pc);
777 void GrowShadowStack(ThreadState *thr);
780 void FuncEntry(ThreadState *thr, uptr pc) {
781 DPrintf2("#%d: FuncEntry %p\n", (int)thr->fast_state.sid(), (void *)pc);
782 if (UNLIKELY(!TryTraceFunc(thr, pc)))
783 return TraceRestartFuncEntry(thr, pc);
784 DCHECK_GE(thr->shadow_stack_pos, thr->shadow_stack);
786 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
788 if (thr->shadow_stack_pos == thr->shadow_stack_end)
789 GrowShadowStack(thr);
791 thr->shadow_stack_pos[0] = pc;
792 thr->shadow_stack_pos++;
796 void FuncExit(ThreadState *thr) {
797 DPrintf2("#%d: FuncExit\n", (int)thr->fast_state.sid());
798 if (UNLIKELY(!TryTraceFunc(thr, 0)))
799 return TraceRestartFuncExit(thr);
800 DCHECK_GT(thr->shadow_stack_pos, thr->shadow_stack);
802 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
804 thr->shadow_stack_pos--;
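FuncEntry and FuncExit keep the shadow stack and the trace in lockstep: entry traces the pc and then pushes it (growing the buffer when the end mark is hit), exit traces pc == 0 and then pops. A standalone model of just the stack discipline (tracing and growth stubbed out; types invented):

    #include <cstdint>
    using uptr = std::uintptr_t;

    struct ShadowStackModel {
      static constexpr int kInitSize = 8;
      uptr storage[kInitSize];
      uptr *shadow_stack = storage;       // base
      uptr *shadow_stack_pos = storage;   // one past the newest frame
      uptr *shadow_stack_end = storage + kInitSize;

      void FuncEntry(uptr pc) {
        // the real code first emits a trace event via TryTraceFunc(thr, pc)
        if (shadow_stack_pos == shadow_stack_end)
          return;  // the real code calls GrowShadowStack(thr) here
        shadow_stack_pos[0] = pc;  // push the call pc
        shadow_stack_pos++;
      }

      void FuncExit() {
        // the real code first emits a trace event via TryTraceFunc(thr, 0)
        if (shadow_stack_pos > shadow_stack)
          shadow_stack_pos--;  // pop
      }
    };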