Lines matching defs:thr (ThreadSanitizer runtime, compiler-rt/lib/tsan/rtl/tsan_rtl.cpp).
Each match is prefixed with its source line number; gaps in the numbering mark elided code.

80 static TracePart* TracePartAlloc(ThreadState* thr) {
85 Trace* trace = &thr->tctx->trace;
89 DPrintf("#%d: TracePartAlloc: part=%p\n", thr->tid, part);
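// TracePartAlloc (80-89): allocates a TracePart for this thread's trace; the
// elided body takes ctx->slot_mtx, recycles exhausted parts, and can trigger a
// global reset when the part limit is hit (inferred from context not shown here).
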
152 bool attached = tctx->thr && tctx->thr->slot;
172 &tctx->thr->trace_pos,
181 if (tctx->thr && !tctx->thr->slot) {
182 atomic_store_relaxed(&tctx->thr->trace_pos, 0);
183 tctx->thr->trace_prev_pc = 0;
196 slot.thr = nullptr;
233 void DoReset(ThreadState* thr, uptr epoch) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
245 DPrintf("#%d: DoReset epoch=%lu\n", thr ? thr->tid : -1, epoch);
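// Lines 152-196 belong to DoResetImpl, whose signature takes no `thr` and is
// therefore not listed. During a global reset every thread context is visited:
// line 152 checks whether the thread is alive and attached to a slot, line 172
// is part of a store that pushes an attached thread's trace_pos to the end of
// its current part (forcing it to switch parts), lines 181-183 clear the trace
// state of live-but-detached threads, and line 196 detaches every slot.
// DoReset (233-245) is the locking entry point; `thr` may be null here, hence
// the `thr ? thr->tid : -1` in the DPrintf on 245.
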
252 static TidSlot* FindSlotAndLock(ThreadState* thr)
253 SANITIZER_ACQUIRE(thr->slot->mtx) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
254 CHECK(!thr->slot);
265 thr->slot_locked = false;
279 DoReset(thr, epoch);
283 CHECK(!thr->slot_locked);
284 thr->slot_locked = true;
285 if (slot->thr) {
286 DPrintf("#%d: preempting sid=%d tid=%d\n", thr->tid, (u32)slot->sid,
287 slot->thr->tid);
288 slot->SetEpoch(slot->thr->fast_state.epoch());
289 slot->thr = nullptr;
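// FindSlotAndLock (252-289): called only while detached (CHECK on 254), it
// returns a locked TidSlot for the thread to attach to, running DoReset first
// (279) if no slot has free epochs. If the chosen slot still has an owner,
// that thread is preempted: its current epoch is saved into the slot and
// slot->thr is cleared (285-289).
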
296 void SlotAttachAndLock(ThreadState* thr) {
297 TidSlot* slot = FindSlotAndLock(thr);
298 DPrintf("#%d: SlotAttach: slot=%u\n", thr->tid, static_cast<int>(slot->sid));
299 CHECK(!slot->thr);
300 CHECK(!thr->slot);
301 slot->thr = thr;
302 thr->slot = slot;
306 thr->fast_state.SetSid(slot->sid);
307 thr->fast_state.SetEpoch(epoch);
308 if (thr->slot_epoch != ctx->global_epoch) {
309 thr->slot_epoch = ctx->global_epoch;
310 thr->clock.Reset();
312 thr->last_sleep_stack_id = kInvalidStackID;
313 thr->last_sleep_clock.Reset();
316 thr->clock.Set(slot->sid, epoch);
317 slot->journal.PushBack({thr->tid, epoch});
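// SlotAttachAndLock (296-317): binds the thread and slot to each other
// (301-302) and seeds thr->fast_state with the slot's sid and epoch (306-307).
// If the global epoch changed since the thread last held a slot, its vector
// clock is reset (308-310); lines 312-313 also reset sleep-tracking state and
// appear to sit in a !SANITIZER_GO block (elided). The (tid, epoch) pair is
// recorded in the slot journal (317) for later trace replay.
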
320 static void SlotDetachImpl(ThreadState* thr, bool exiting) {
321 TidSlot* slot = thr->slot;
322 thr->slot = nullptr;
323 if (thr != slot->thr) {
325 if (thr->slot_epoch != ctx->global_epoch) {
327 auto* trace = &thr->tctx->trace;
336 thr->tctx->trace.local_head = nullptr;
337 atomic_store_relaxed(&thr->trace_pos, 0);
338 thr->trace_prev_pc = 0;
347 CHECK(exiting || thr->fast_state.epoch() == kEpochLast);
348 slot->SetEpoch(thr->fast_state.epoch());
349 slot->thr = nullptr;
352 void SlotDetach(ThreadState* thr) {
353 Lock lock(&thr->slot->mtx);
354 SlotDetachImpl(thr, true);
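// SlotDetachImpl (320-349): unbinds the thread from its slot. If another
// thread already took the slot over (323), only the detaching thread's stale
// trace state from a previous global epoch is cleaned up (325-338); otherwise
// the thread's final epoch is stored into the slot and the slot is released
// (348-349). SlotDetach (352-354) is the locked wrapper used on thread exit.
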
357 void SlotLock(ThreadState* thr) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
358 DCHECK(!thr->slot_locked);
366 TidSlot* slot = thr->slot;
368 thr->slot_locked = true;
369 if (LIKELY(thr == slot->thr && thr->fast_state.epoch() != kEpochLast))
371 SlotDetachImpl(thr, false);
372 thr->slot_locked = false;
374 SlotAttachAndLock(thr);
377 void SlotUnlock(ThreadState* thr) {
378 DCHECK(thr->slot_locked);
379 thr->slot_locked = false;
380 thr->slot->mtx.Unlock();
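// SlotLock (357-374): the fast path keeps the current slot while it still has
// unexhausted epochs (369); otherwise the thread detaches and re-attaches to a
// fresh slot (371-374). SlotUnlock (377-380) releases the slot mutex.

These two functions back the RAII `SlotLocker` guard used at lines 574 and 965.
A minimal sketch of such a guard, assuming (as in the upstream tsan_rtl.h,
which is not part of this listing) that the second constructor argument means
"no-op if this thread already holds its slot lock":

class SlotLocker {
 public:
  explicit SlotLocker(ThreadState* thr, bool recursive = false)
      : thr_(thr), locked_(recursive ? thr->slot_locked : false) {
    if (!locked_)
      SlotLock(thr_);  // may detach and re-attach if the epoch is exhausted
  }
  ~SlotLocker() {
    if (!locked_)
      SlotUnlock(thr_);
  }

 private:
  ThreadState* thr_;
  bool locked_;
};
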
569 void UnmapShadow(ThreadState *thr, uptr addr, uptr size) {
574 SlotLocker locker(thr, true);
575 ctx->metamap.ResetRange(thr->proc(), addr, size, true);
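// UnmapShadow (569-575): on munmap, resets the metadata map (and, in elided
// lines, the shadow) for the unmapped range; the `true` passed to SlotLocker
// requests recursive locking since callers may already hold the slot lock.
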
670 ThreadState* thr = cur_thread();
671 thr->nomalloc = false;
672 thr->ignore_sync++;
673 thr->ignore_reads_and_writes++;
674 atomic_store_relaxed(&thr->in_signal_handler, 0);
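// Lines 670-674 appear to sit inside CheckUnwind(), the fatal-error path: it
// re-enables allocation and bumps the ignore counters so that printing the
// report cannot recurse into race detection or trip over signal-handler state.
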
682 void Initialize(ThreadState *thr) {
710 ProcWire(proc, thr);
733 ThreadStart(thr, tid, GetTid(), ThreadType::Regular);
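// Initialize (682-733): one-time runtime setup for the main thread; a
// Processor is wired to the ThreadState (710) and the main thread is started
// as an ordinary registered thread (733).
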
769 int Finalize(ThreadState *thr) {
777 if (flags()->atexit_sleep_ms > 0 && ThreadCount(thr) > 1)
789 ThreadFinalize(thr);
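// Finalize (769-789): atexit processing; if other threads are still running,
// optionally sleeps for flags()->atexit_sleep_ms to give at-exit races a
// chance to be reported (777) before finalizing the main thread (789).
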
809 void ForkBefore(ThreadState* thr, uptr pc) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
814 SlotDetach(thr);
825 thr->suppress_reports++;
830 thr->ignore_interceptors++;
832 thr->ignore_reads_and_writes++;
839 static void ForkAfter(ThreadState* thr,
841 thr->suppress_reports--; // Enabled in ForkBefore.
842 thr->ignore_interceptors--;
843 thr->ignore_reads_and_writes--;
849 SlotAttachAndLock(thr);
850 SlotUnlock(thr);
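// ForkBefore (809-832) detaches the slot and suppresses reports, interceptors
// and memory-access processing so that fork cannot deadlock on runtime state;
// ForkAfter (839-850) symmetrically re-enables them and re-attaches a slot.
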
855 void ForkParentAfter(ThreadState* thr, uptr pc) { ForkAfter(thr, false); }
857 void ForkChildAfter(ThreadState* thr, uptr pc, bool start_thread) {
858 ForkAfter(thr, true);
859 u32 nthread = ctx->thread_registry.OnFork(thr->tid);
872 thr->ignore_interceptors++;
873 thr->suppress_reports++;
874 ThreadIgnoreBegin(thr, pc);
875 ThreadIgnoreSyncBegin(thr, pc);
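// ForkChildAfter (857-875): the child compacts the thread registry down to the
// forking thread (859). The ignores enabled on 872-875 belong to the
// multi-threaded-fork branch (elided): with other threads gone mid-operation,
// internal mutexes may be held forever, so everything is ignored in the hope
// that the child execs soon.
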
882 void GrowShadowStack(ThreadState *thr) {
883 const int sz = thr->shadow_stack_end - thr->shadow_stack;
886 internal_memcpy(newstack, thr->shadow_stack, sz * sizeof(uptr));
887 Free(thr->shadow_stack);
888 thr->shadow_stack = newstack;
889 thr->shadow_stack_pos = newstack + sz;
890 thr->shadow_stack_end = newstack + newsz;
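// GrowShadowStack (882-890): moves the shadow (function call) stack into a
// larger buffer and repoints pos/end; the elided lines 884-885 presumably
// compute the new size (upstream doubles it) and allocate the new buffer.
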
894 StackID CurrentStackId(ThreadState *thr, uptr pc) {
896 if (!thr->is_inited) // May happen during bootstrap.
901 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
903 if (thr->shadow_stack_pos == thr->shadow_stack_end)
904 GrowShadowStack(thr);
906 thr->shadow_stack_pos[0] = pc;
907 thr->shadow_stack_pos++;
910 StackTrace(thr->shadow_stack, thr->shadow_stack_pos - thr->shadow_stack));
912 thr->shadow_stack_pos--;
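// CurrentStackId (894-912): produces a StackID for the current stack plus the
// given pc: the pc is temporarily pushed onto the shadow stack (906-907,
// growing it if full), the whole stack is interned (910), and the pc is popped
// again (912). Bails out with an invalid id during bootstrap (896).
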
916 static bool TraceSkipGap(ThreadState* thr) {
917 Trace *trace = &thr->tctx->trace;
918 Event *pos = reinterpret_cast<Event *>(atomic_load_relaxed(&thr->trace_pos));
921 DPrintf("#%d: TraceSwitchPart enter trace=%p parts=%p-%p pos=%p\n", thr->tid,
938 atomic_store_relaxed(&thr->trace_pos, reinterpret_cast<uptr>(pos));
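// TraceSkipGap (916-938): TraceAcquire's fast-path end-of-part check has false
// positives at alignment boundaries inside a part; this fills the small gap
// with NopEvents and advances trace_pos (938), returning true when the current
// part still has room and no real part switch is needed.
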
947 void TraceSwitchPart(ThreadState* thr) {
948 if (TraceSkipGap(thr))
953 TracePart* part = thr->tctx->trace.parts.Back();
955 atomic_store_relaxed(&thr->trace_pos,
961 TraceSwitchPartImpl(thr);
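// TraceSwitchPart (947-961): first tries to merely skip a gap (948). After a
// multi-threaded fork (condition elided) it instead rewinds trace_pos to the
// start of the existing part (953-955), overwriting old events just to survive
// until exec; otherwise it performs the real switch (961).
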
964 void TraceSwitchPartImpl(ThreadState* thr) {
965 SlotLocker locker(thr, true);
966 Trace* trace = &thr->tctx->trace;
967 TracePart* part = TracePartAlloc(thr);
969 thr->trace_prev_pc = 0;
983 atomic_store_relaxed(&thr->trace_pos,
988 TraceTime(thr);
995 uptr* pos = Max(&thr->shadow_stack[0], thr->shadow_stack_pos - kMaxFrames);
996 for (; pos < thr->shadow_stack_pos; pos++) {
997 if (TryTraceFunc(thr, *pos))
999 CHECK(TraceSkipGap(thr));
1000 CHECK(TryTraceFunc(thr, *pos));
1003 for (uptr i = 0; i < thr->mset.Size(); i++) {
1004 MutexSet::Desc d = thr->mset.Get(i);
1006 TraceMutexLock(thr, d.write ? EventType::kLock : EventType::kRLock, 0,
1014 if (!TraceAcquire(thr, &ev)) {
1015 CHECK(TraceSkipGap(thr));
1016 CHECK(TraceAcquire(thr, &ev));
1028 if (ctx->slot_queue.Queued(thr->slot)) {
1029 ctx->slot_queue.Remove(thr->slot);
1030 ctx->slot_queue.PushBack(thr->slot);
1035 DPrintf("#%d: TraceSwitchPart exit parts=%p-%p pos=0x%zx\n", thr->tid,
1037 atomic_load_relaxed(&thr->trace_pos));
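// TraceSwitchPartImpl (964-1037): under the slot lock, allocates a fresh part
// (965-967), resets the PC delta base (969) and trace_pos (983), then makes
// the new part self-contained for replay: a timestamp (988), the top kMaxFrames
// of the shadow stack re-emitted as function entries (995-1000), and every
// currently held mutex re-emitted as a lock event (1003-1006). The TraceAcquire
// on 1014 must succeed, retrying once after closing a gap (1015-1016). Finally
// the thread's slot is moved to the back of the slot queue (1028-1030),
// apparently so that actively tracing slots are preempted last.
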
1040 void ThreadIgnoreBegin(ThreadState* thr, uptr pc) {
1041 DPrintf("#%d: ThreadIgnoreBegin\n", thr->tid);
1042 thr->ignore_reads_and_writes++;
1043 CHECK_GT(thr->ignore_reads_and_writes, 0);
1044 thr->fast_state.SetIgnoreBit();
1047 thr->mop_ignore_set.Add(CurrentStackId(thr, pc));
1051 void ThreadIgnoreEnd(ThreadState *thr) {
1052 DPrintf("#%d: ThreadIgnoreEnd\n", thr->tid);
1053 CHECK_GT(thr->ignore_reads_and_writes, 0);
1054 thr->ignore_reads_and_writes--;
1055 if (thr->ignore_reads_and_writes == 0) {
1056 thr->fast_state.ClearIgnoreBit();
1058 thr->mop_ignore_set.Reset();
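// ThreadIgnoreBegin/End (1040-1058): a nestable ignore scope for memory
// accesses; the counter guards the fast_state ignore bit, and the stack of the
// Begin caller is recorded (1047) so reports about unterminated ignores can
// point at it.

A usage sketch of the nesting semantics; `IgnoreScopeExample` is a made-up
name, and the comments restate what the listed counter logic does:

void IgnoreScopeExample(ThreadState* thr, uptr pc) {
  ThreadIgnoreBegin(thr, pc);  // count 0 -> 1, sets the fast_state ignore bit
  ThreadIgnoreBegin(thr, pc);  // count 1 -> 2, bit already set
  ThreadIgnoreEnd(thr);        // count 2 -> 1, bit stays set
  ThreadIgnoreEnd(thr);        // count 1 -> 0, clears bit and mop_ignore_set
}
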
1066 ThreadState *thr = cur_thread();
1067 return thr->shadow_stack_pos - thr->shadow_stack;
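// Lines 1066-1067 are the body of a test-only interface function reporting the
// current shadow-stack depth; its signature contains no `thr` and is elided.
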
1071 void ThreadIgnoreSyncBegin(ThreadState *thr, uptr pc) {
1072 DPrintf("#%d: ThreadIgnoreSyncBegin\n", thr->tid);
1073 thr->ignore_sync++;
1074 CHECK_GT(thr->ignore_sync, 0);
1077 thr->sync_ignore_set.Add(CurrentStackId(thr, pc));
1081 void ThreadIgnoreSyncEnd(ThreadState *thr) {
1082 DPrintf("#%d: ThreadIgnoreSyncEnd\n", thr->tid);
1083 CHECK_GT(thr->ignore_sync, 0);
1084 thr->ignore_sync--;
1086 if (thr->ignore_sync == 0)
1087 thr->sync_ignore_set.Reset();
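// ThreadIgnoreSyncBegin/End (1071-1087): the same nestable pattern for sync
// operations; only the counter and sync_ignore_set are involved, as sync
// ignoring has no fast_state bit.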