Lines Matching defs:thr
80 static TracePart* TracePartAlloc(ThreadState* thr) {
85 Trace* trace = &thr->tctx->trace;
89 DPrintf("#%d: TracePartAlloc: part=%p\n", thr->tid, part);
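// TracePartAlloc hands the caller a fresh TracePart for this thread's trace
// (thr->tctx->trace); the elided body presumably recycles or allocates the
// part under ctx->slot_mtx before the DPrintf above reports it.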
152 bool attached = tctx->thr && tctx->thr->slot;
172 &tctx->thr->trace_pos,
181 if (tctx->thr && !tctx->thr->slot) {
182 atomic_store_relaxed(&tctx->thr->trace_pos, 0);
183 tctx->thr->trace_prev_pc = 0;
196 slot.thr = nullptr;
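// Lines 152-196 appear to belong to the reset routine proper (DoResetImpl in
// upstream tsan_rtl.cpp): it walks every ThreadContext, treating a thread as
// attached iff tctx->thr && tctx->thr->slot, rewinds trace_pos/trace_prev_pc
// for threads without a slot, and finally clears slot.thr on every TidSlot.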
233 void DoReset(ThreadState* thr, uptr epoch) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
245 DPrintf("#%d: DoReset epoch=%lu\n", thr ? thr->tid : -1, epoch);
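// DoReset is the reset entry point: thr may be null (hence "thr ? thr->tid :
// -1" in the DPrintf), and the elided body seemingly locks all slot mutexes,
// bails out if another thread already reset this epoch, and only then runs
// the implementation above. The locking pattern is too dynamic for Clang's
// thread-safety analysis, hence SANITIZER_NO_THREAD_SAFETY_ANALYSIS.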
252 static TidSlot* FindSlotAndLock(ThreadState* thr)
253 SANITIZER_ACQUIRE(thr->slot->mtx) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
254 CHECK(!thr->slot);
265 thr->slot_locked = false;
279 DoReset(thr, epoch);
283 CHECK(!thr->slot_locked);
284 thr->slot_locked = true;
285 if (slot->thr) {
286 DPrintf("#%d: preempting sid=%d tid=%d\n", thr->tid, (u32)slot->sid,
287 slot->thr->tid);
288 slot->SetEpoch(slot->thr->fast_state.epoch());
289 slot->thr = nullptr;
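// FindSlotAndLock acquires a slot for a slot-less thread (CHECK(!thr->slot)).
// It loops: releasing an exhausted candidate from the previous iteration
// (line 265), triggering DoReset when no usable slot remains (line 279), and
// otherwise locking a candidate. If that slot still has an owner, the owner
// is preempted: its current epoch is saved into the slot and slot->thr is
// cleared.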
296 void SlotAttachAndLock(ThreadState* thr) {
297 TidSlot* slot = FindSlotAndLock(thr);
298 DPrintf("#%d: SlotAttach: slot=%u\n", thr->tid, static_cast<int>(slot->sid));
299 CHECK(!slot->thr);
300 CHECK(!thr->slot);
301 slot->thr = thr;
302 thr->slot = slot;
306 thr->fast_state.SetSid(slot->sid);
307 thr->fast_state.SetEpoch(epoch);
308 if (thr->slot_epoch != ctx->global_epoch) {
309 thr->slot_epoch = ctx->global_epoch;
310 thr->clock.Reset();
312 thr->last_sleep_stack_id = kInvalidStackID;
313 thr->last_sleep_clock.Reset();
316 thr->clock.Set(slot->sid, epoch);
317 slot->journal.PushBack({thr->tid, epoch});
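// SlotAttachAndLock binds slot and thread to each other, seeds fast_state
// with the slot's sid and (elided) epoch, and logs the attachment in the
// slot's journal. When the thread crosses into a new global_epoch it also
// resets its vector clock and last-sleep state before taking the slot's
// epoch into its own clock (line 316).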
320 static void SlotDetachImpl(ThreadState* thr, bool exiting) {
321 TidSlot* slot = thr->slot;
322 thr->slot = nullptr;
323 if (thr != slot->thr) {
325 if (thr->slot_epoch != ctx->global_epoch) {
327 auto* trace = &thr->tctx->trace;
336 thr->tctx->trace.local_head = nullptr;
337 atomic_store_relaxed(&thr->trace_pos, 0);
338 thr->trace_prev_pc = 0;
347 CHECK(exiting || thr->fast_state.epoch() == kEpochLast);
348 slot->SetEpoch(thr->fast_state.epoch());
349 slot->thr = nullptr;
352 void SlotDetach(ThreadState* thr) {
353 Lock lock(&thr->slot->mtx);
354 SlotDetachImpl(thr, true);
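// SlotDetachImpl undoes the attachment. If another thread already preempted
// us (thr != slot->thr), only thread-local trace state is rewound (lines
// 336-338); otherwise the thread must either be exiting or have exhausted its
// epoch (line 347), and its final epoch is stored into the slot before
// slot->thr is cleared. SlotDetach is the locked, exiting == true wrapper.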
357 void SlotLock(ThreadState* thr) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
358 DCHECK(!thr->slot_locked);
366 TidSlot* slot = thr->slot;
368 thr->slot_locked = true;
369 if (LIKELY(thr == slot->thr && thr->fast_state.epoch() != kEpochLast))
371 SlotDetachImpl(thr, false);
372 thr->slot_locked = false;
374 SlotAttachAndLock(thr);
377 void SlotUnlock(ThreadState* thr) {
378 DCHECK(thr->slot_locked);
379 thr->slot_locked = false;
380 thr->slot->mtx.Unlock();
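// SlotLock/SlotUnlock bracket mutations of slot state. The LIKELY fast path
// keeps the current slot while the thread still owns it and has epochs left;
// otherwise the thread detaches and reattaches, possibly to a different slot.
// A minimal usage sketch, assuming the RAII SlotLocker seen at lines 574 and
// 962 simply pairs SlotLock in its constructor with SlotUnlock in its
// destructor (the meaning of its bool argument is not shown in this listing):
//
//   void ExampleRuntimeOp(ThreadState* thr) {
//     SlotLocker locker(thr, true);
//     // Slot state (epoch, journal, trace parts) may be mutated here; the
//     // slot cannot be preempted or reset while its mutex is held.
//   }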
569 void UnmapShadow(ThreadState *thr, uptr addr, uptr size) {
574 SlotLocker locker(thr, true);
575 ctx->metamap.ResetRange(thr->proc(), addr, size, true);
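// UnmapShadow resets the metadata map for [addr, addr + size) under a
// SlotLocker, presumably so a concurrent reset cannot observe the range
// half-cleared.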
670 ThreadState* thr = cur_thread();
671 thr->nomalloc = false;
672 thr->ignore_sync++;
673 thr->ignore_reads_and_writes++;
674 atomic_store_relaxed(&thr->in_signal_handler, 0);
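// Lines 670-674 look like the fatal-error path (CheckUnwind in upstream): the
// current thread re-enables malloc and stops processing sync and memory
// accesses so that printing the crash report cannot recurse into the runtime.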
681 void Initialize(ThreadState *thr) {
709 ProcWire(proc, thr);
732 ThreadStart(thr, tid, GetTid(), ThreadType::Regular);
768 int Finalize(ThreadState *thr) {
776 if (flags()->atexit_sleep_ms > 0 && ThreadCount(thr) > 1)
788 ThreadFinalize(thr);
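// Initialize wires the main thread to its Processor and starts it as a
// regular thread; Finalize, when other threads are still alive, optionally
// sleeps for flags()->atexit_sleep_ms to give them a chance to finish and
// report pending races before ThreadFinalize runs its end-of-process checks.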
808 void ForkBefore(ThreadState* thr, uptr pc) SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
812 SlotDetach(thr);
823 thr->suppress_reports++;
828 thr->ignore_interceptors++;
830 thr->ignore_reads_and_writes++;
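// ForkBefore detaches the slot and raises suppress_reports,
// ignore_interceptors, and ignore_reads_and_writes: between fork() and the
// matching ForkAfter the runtime's internal mutexes are held, so reporting or
// intercepting anything in an atfork callback would deadlock or corrupt
// shadow state.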
837 static void ForkAfter(ThreadState* thr,
839 thr->suppress_reports--; // Enabled in ForkBefore.
840 thr->ignore_interceptors--;
841 thr->ignore_reads_and_writes--;
847 SlotAttachAndLock(thr);
848 SlotUnlock(thr);
852 void ForkParentAfter(ThreadState* thr, uptr pc) { ForkAfter(thr, false); }
854 void ForkChildAfter(ThreadState* thr, uptr pc, bool start_thread) {
855 ForkAfter(thr, true);
856 u32 nthread = ctx->thread_registry.OnFork(thr->tid);
869 thr->ignore_interceptors++;
870 thr->suppress_reports++;
871 ThreadIgnoreBegin(thr, pc);
872 ThreadIgnoreSyncBegin(thr, pc);
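// ForkAfter drops the three counters raised in ForkBefore and reattaches the
// slot. Judging by the calls at lines 852 and 855, the parameter cut off in
// the truncated signature at line 837 is a bool distinguishing child (true)
// from parent (false). In the child, the thread registry is reset via OnFork;
// when the parent was multi-threaded at fork time (lines 869-872), the child
// apparently ignores everything from then on, since runtime mutexes may be
// held by threads that no longer exist.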
879 void GrowShadowStack(ThreadState *thr) {
880 const int sz = thr->shadow_stack_end - thr->shadow_stack;
883 internal_memcpy(newstack, thr->shadow_stack, sz * sizeof(uptr));
884 Free(thr->shadow_stack);
885 thr->shadow_stack = newstack;
886 thr->shadow_stack_pos = newstack + sz;
887 thr->shadow_stack_end = newstack + newsz;
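// GrowShadowStack reallocates the per-thread shadow call stack: sz is the
// current capacity (end - base), the elided lines presumably double it into
// newsz and allocate newstack, and the old frames are copied before pos and
// end are re-pointed at the new buffer.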
891 StackID CurrentStackId(ThreadState *thr, uptr pc) {
893 if (!thr->is_inited) // May happen during bootstrap.
898 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
900 if (thr->shadow_stack_pos == thr->shadow_stack_end)
901 GrowShadowStack(thr);
903 thr->shadow_stack_pos[0] = pc;
904 thr->shadow_stack_pos++;
907 StackTrace(thr->shadow_stack, thr->shadow_stack_pos - thr->shadow_stack));
909 thr->shadow_stack_pos--;
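// CurrentStackId temporarily pushes pc as the top frame (growing the shadow
// stack if it is full), persists the whole stack into the stack depot via the
// StackTrace at line 907, and pops the frame again, so the returned StackID
// includes the call site itself. During bootstrap (line 893) it bails out,
// presumably returning an invalid id.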
913 static bool TraceSkipGap(ThreadState* thr) {
914 Trace *trace = &thr->tctx->trace;
915 Event *pos = reinterpret_cast<Event *>(atomic_load_relaxed(&thr->trace_pos));
918 DPrintf("#%d: TraceSwitchPart enter trace=%p parts=%p-%p pos=%p\n", thr->tid,
935 atomic_store_relaxed(&thr->trace_pos, reinterpret_cast<uptr>(pos));
944 void TraceSwitchPart(ThreadState* thr) {
945 if (TraceSkipGap(thr))
950 TracePart* part = thr->tctx->trace.parts.Back();
952 atomic_store_relaxed(&thr->trace_pos,
958 TraceSwitchPartImpl(thr);
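// TraceSkipGap advances trace_pos past the unusable tail of the current
// TracePart and reports whether that sufficed. TraceSwitchPart tries that
// cheap path first; the rewind at lines 950-952 looks like the
// after-multithreaded-fork survival mode, where the trace is simply
// overwritten from the start of the last part. Otherwise a real switch is
// performed by TraceSwitchPartImpl.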
961 void TraceSwitchPartImpl(ThreadState* thr) {
962 SlotLocker locker(thr, true);
963 Trace* trace = &thr->tctx->trace;
964 TracePart* part = TracePartAlloc(thr);
966 thr->trace_prev_pc = 0;
980 atomic_store_relaxed(&thr->trace_pos,
985 TraceTime(thr);
992 uptr* pos = Max(&thr->shadow_stack[0], thr->shadow_stack_pos - kMaxFrames);
993 for (; pos < thr->shadow_stack_pos; pos++) {
994 if (TryTraceFunc(thr, *pos))
996 CHECK(TraceSkipGap(thr));
997 CHECK(TryTraceFunc(thr, *pos));
1000 for (uptr i = 0; i < thr->mset.Size(); i++) {
1001 MutexSet::Desc d = thr->mset.Get(i);
1003 TraceMutexLock(thr, d.write ? EventType::kLock : EventType::kRLock, 0,
1011 if (!TraceAcquire(thr, &ev)) {
1012 CHECK(TraceSkipGap(thr));
1013 CHECK(TraceAcquire(thr, &ev));
1025 if (ctx->slot_queue.Queued(thr->slot)) {
1026 ctx->slot_queue.Remove(thr->slot);
1027 ctx->slot_queue.PushBack(thr->slot);
1032 DPrintf("#%d: TraceSwitchPart exit parts=%p-%p pos=0x%zx\n", thr->tid,
1034 atomic_load_relaxed(&thr->trace_pos));
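// TraceSwitchPartImpl allocates a fresh part under SlotLocker and re-seeds it
// so it is self-contained: a timestamp (TraceTime), the top kMaxFrames of the
// shadow stack replayed through TryTraceFunc, and every held mutex replayed
// through TraceMutexLock. Each CHECK(TraceSkipGap)/retry pair handles an
// event that straddles the part boundary. Finally the thread's slot is
// rotated to the back of ctx->slot_queue, making it the last candidate for
// preemption.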
1037 void ThreadIgnoreBegin(ThreadState* thr, uptr pc) {
1038 DPrintf("#%d: ThreadIgnoreBegin\n", thr->tid);
1039 thr->ignore_reads_and_writes++;
1040 CHECK_GT(thr->ignore_reads_and_writes, 0);
1041 thr->fast_state.SetIgnoreBit();
1044 thr->mop_ignore_set.Add(CurrentStackId(thr, pc));
1048 void ThreadIgnoreEnd(ThreadState *thr) {
1049 DPrintf("#%d: ThreadIgnoreEnd\n", thr->tid);
1050 CHECK_GT(thr->ignore_reads_and_writes, 0);
1051 thr->ignore_reads_and_writes--;
1052 if (thr->ignore_reads_and_writes == 0) {
1053 thr->fast_state.ClearIgnoreBit();
1055 thr->mop_ignore_set.Reset();
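// ThreadIgnoreBegin/End keep a nesting counter for memory-access ignores: the
// ignore bit in fast_state is set on the first Begin and cleared only when
// the counter returns to zero. mop_ignore_set records the stacks where
// ignores were begun, apparently so leaked ignores can be reported at thread
// exit.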
1063 ThreadState *thr = cur_thread();
1064 return thr->shadow_stack_pos - thr->shadow_stack;
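// Lines 1063-1064 look like a small helper (likely test-only) returning the
// current shadow stack depth, shadow_stack_pos - shadow_stack.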
1068 void ThreadIgnoreSyncBegin(ThreadState *thr, uptr pc) {
1069 DPrintf("#%d: ThreadIgnoreSyncBegin\n", thr->tid);
1070 thr->ignore_sync++;
1071 CHECK_GT(thr->ignore_sync, 0);
1074 thr->sync_ignore_set.Add(CurrentStackId(thr, pc));
1078 void ThreadIgnoreSyncEnd(ThreadState *thr) {
1079 DPrintf("#%d: ThreadIgnoreSyncEnd\n", thr->tid);
1080 CHECK_GT(thr->ignore_sync, 0);
1081 thr->ignore_sync--;
1083 if (thr->ignore_sync == 0)
1084 thr->sync_ignore_set.Reset();
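// ThreadIgnoreSyncBegin/End are the analogous nesting counter for
// synchronization ignores (ignore_sync), with sync_ignore_set in the role of
// mop_ignore_set. A hypothetical caller pairing both kinds of ignores around
// an untracked region, mirroring lines 871-872, might look like:
//
//   ThreadIgnoreBegin(thr, pc);      // stop tracking memory accesses
//   ThreadIgnoreSyncBegin(thr, pc);  // stop tracking synchronization
//   // ... operations invisible to the race detector ...
//   ThreadIgnoreSyncEnd(thr);
//   ThreadIgnoreEnd(thr);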