xref: /openbsd-src/gnu/llvm/compiler-rt/lib/hwasan/hwasan_thread.cpp (revision 810390e339a5425391477d5d41c78d7cab2424ac)

#include "hwasan_thread.h"

#include "hwasan.h"
#include "hwasan_interface_internal.h"
#include "hwasan_mapping.h"
#include "hwasan_poisoning.h"
#include "hwasan_thread_list.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_file.h"
#include "sanitizer_common/sanitizer_placement_new.h"
#include "sanitizer_common/sanitizer_tls_get_addr.h"

namespace __hwasan {

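// Produce a non-zero seed for per-thread tag randomness. Prefer the system
// entropy source; if GetRandom is unavailable, fall back to mixing the
// current time with the frame address, retrying until the value is non-zero.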
static u32 RandomSeed() {
  u32 seed;
  do {
    if (UNLIKELY(!GetRandom(reinterpret_cast<void *>(&seed), sizeof(seed),
                            /*blocking=*/false))) {
      seed = static_cast<u32>(
          (NanoTime() >> 12) ^
          (reinterpret_cast<uptr>(__builtin_frame_address(0)) >> 4));
    }
  } while (!seed);
  return seed;
}

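// Seed the per-thread tag generator: a fresh random seed when random_tags is
// set, otherwise the thread's unique id (which keeps the tag sequence
// deterministic for that thread).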
void Thread::InitRandomState() {
  random_state_ = flags()->random_tags ? RandomSeed() : unique_id_;
  random_state_inited_ = true;

  // Push a random number of zeros onto the ring buffer so that the first stack
  // tag base will be random.
  for (tag_t i = 0, e = GenerateRandomTag(); i != e; ++i)
    stack_allocations_->push(0);
}

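// One-time per-thread setup: assign a process-wide unique id, optionally
// allocate the heap-allocation ring buffer, set up the stack ring buffer
// (deferred on Fuchsia, see below), and record stack/TLS bounds and the DTLS
// pointer.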
void Thread::Init(uptr stack_buffer_start, uptr stack_buffer_size,
                  const InitState *state) {
  CHECK_EQ(0, unique_id_);  // try to catch bad stack reuse
  CHECK_EQ(0, stack_top_);
  CHECK_EQ(0, stack_bottom_);

  static atomic_uint64_t unique_id;
  unique_id_ = atomic_fetch_add(&unique_id, 1, memory_order_relaxed);

  if (auto sz = flags()->heap_history_size)
    heap_allocations_ = HeapAllocationsRingBuffer::New(sz);

#if !SANITIZER_FUCHSIA
  // Do not initialize the stack ring buffer just yet on Fuchsia. Threads will
  // be initialized before we enter the thread itself, so we will instead call
  // this later.
  InitStackRingBuffer(stack_buffer_start, stack_buffer_size);
#endif
  InitStackAndTls(state);
  dtls_ = DTLS_Get();
}

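// Place the stack-allocations ring buffer into the thread-long TLS slot via
// placement new; this also registers (this) as the current thread. Then
// sanity-check that the stack bounds, when known, lie in application memory.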
void Thread::InitStackRingBuffer(uptr stack_buffer_start,
                                 uptr stack_buffer_size) {
  HwasanTSDThreadInit();  // Only needed with interceptors.
  uptr *ThreadLong = GetCurrentThreadLongPtr();
  // The following implicitly sets (this) as the current thread.
  stack_allocations_ = new (ThreadLong)
      StackAllocationsRingBuffer((void *)stack_buffer_start, stack_buffer_size);
  // Check that it worked.
  CHECK_EQ(GetCurrentThread(), this);

  // ScopedTaggingDisabler needs GetCurrentThread to be set up.
  ScopedTaggingDisabler disabler;

  if (stack_bottom_) {
    int local;
    CHECK(AddrIsInStack((uptr)&local));
    CHECK(MemIsApp(stack_bottom_));
    CHECK(MemIsApp(stack_top_ - 1));
  }

  if (flags()->verbose_threads) {
    if (IsMainThread()) {
      Printf("sizeof(Thread): %zd sizeof(HeapRB): %zd sizeof(StackRB): %zd\n",
             sizeof(Thread), heap_allocations_->SizeInBytes(),
             stack_allocations_->size() * sizeof(uptr));
    }
    Print("Creating  : ");
  }
}

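// Reset the shadow for this thread's stack and TLS ranges to the zero tag so
// stale tags do not outlive the thread.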
void Thread::ClearShadowForThreadStackAndTLS() {
  if (stack_top_ != stack_bottom_)
    TagMemory(stack_bottom_, stack_top_ - stack_bottom_, 0);
  if (tls_begin_ != tls_end_)
    TagMemory(tls_begin_, tls_end_ - tls_begin_, 0);
}

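// Per-thread teardown: return the allocator cache, clear stack/TLS shadow,
// free the heap ring buffer and DTLS, then unregister this thread (see the
// note below about late free() calls from glibc).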
void Thread::Destroy() {
  if (flags()->verbose_threads)
    Print("Destroying: ");
  AllocatorSwallowThreadLocalCache(allocator_cache());
  ClearShadowForThreadStackAndTLS();
  if (heap_allocations_)
    heap_allocations_->Delete();
  DTLS_Destroy();
  // Unregister this as the current thread.
  // Instrumented code cannot run on this thread from this point onwards, but
  // malloc/free can still be served. Glibc may call free() very late, after
  // all TSD destructors are done.
  CHECK_EQ(GetCurrentThread(), this);
  *GetCurrentThreadLongPtr() = 0;
}

void Thread::Print(const char *Prefix) {
  Printf("%sT%zd %p stack: [%p,%p) sz: %zd tls: [%p,%p)\n", Prefix, unique_id_,
         (void *)this, stack_bottom(), stack_top(),
         stack_top() - stack_bottom(), tls_begin(), tls_end());
}

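// One step of a 32-bit xorshift PRNG (Marsaglia's 13/17/5 variant); any
// non-zero state maps to another non-zero state.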
static u32 xorshift(u32 state) {
  state ^= state << 13;
  state ^= state >> 17;
  state ^= state << 5;
  return state;
}

// Generate a (pseudo-)random non-zero tag.
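// The result is confined to the low num_bits bits and is never 0; when
// tagging is disabled for this thread, 0 is returned instead. With
// random_tags, bits are drawn from a buffered xorshift stream; otherwise a
// simple counter derived from the random state is used.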
tag_t Thread::GenerateRandomTag(uptr num_bits) {
  DCHECK_GT(num_bits, 0);
  if (tagging_disabled_)
    return 0;
  tag_t tag;
  const uptr tag_mask = (1ULL << num_bits) - 1;
  do {
    if (flags()->random_tags) {
      if (!random_buffer_) {
        EnsureRandomStateInited();
        random_buffer_ = random_state_ = xorshift(random_state_);
      }
      CHECK(random_buffer_);
      tag = random_buffer_ & tag_mask;
      random_buffer_ >>= num_bits;
    } else {
      EnsureRandomStateInited();
      random_state_ += 1;
      tag = random_state_ & tag_mask;
    }
  } while (!tag);
  return tag;
}

} // namespace __hwasan

// --- Implementation of LSan-specific functions --- {{{1
namespace __lsan {

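// These hooks let LeakSanitizer walk HWASan's thread list: the registry lock
// is forwarded to hwasanThreadList(), and threads are looked up by their OS
// thread id to report stack/TLS ranges to the leak checker.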
static __hwasan::HwasanThreadList *GetHwasanThreadListLocked() {
  auto &tl = __hwasan::hwasanThreadList();
  tl.CheckLocked();
  return &tl;
}

static __hwasan::Thread *GetThreadByOsIDLocked(tid_t os_id) {
  return GetHwasanThreadListLocked()->FindThreadLocked(
      [os_id](__hwasan::Thread *t) { return t->os_id() == os_id; });
}

void LockThreadRegistry() { __hwasan::hwasanThreadList().Lock(); }

void UnlockThreadRegistry() { __hwasan::hwasanThreadList().Unlock(); }

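// If the current thread is the main thread, refresh its recorded OS thread
// id so that lookups by os_id (GetThreadByOsIDLocked) keep finding it.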
void EnsureMainThreadIDIsCorrect() {
  auto *t = __hwasan::GetCurrentThread();
  if (t && (t->IsMainThread()))
    t->set_os_id(GetTid());
}

bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
                           uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
                           uptr *cache_end, DTLS **dtls) {
  auto *t = GetThreadByOsIDLocked(os_id);
  if (!t)
    return false;
  *stack_begin = t->stack_bottom();
  *stack_end = t->stack_top();
  *tls_begin = t->tls_begin();
  *tls_end = t->tls_end();
  // FIXME: is this correct for HWASan?
  *cache_begin = 0;
  *cache_end = 0;
  *dtls = t->dtls();
  return true;
}

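// The remaining callbacks are no-ops in this file: HWASan has nothing extra
// to report to the leak checker for allocator caches, extra stack ranges,
// additional thread context pointers, or running threads.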
void GetAllThreadAllocatorCachesLocked(InternalMmapVector<uptr> *caches) {}

void GetThreadExtraStackRangesLocked(tid_t os_id,
                                     InternalMmapVector<Range> *ranges) {}
void GetThreadExtraStackRangesLocked(InternalMmapVector<Range> *ranges) {}

void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) {}
void GetRunningThreadsLocked(InternalMmapVector<tid_t> *threads) {}

}  // namespace __lsan