//===- nsan_allocator.cpp -------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// NumericalStabilitySanitizer allocator.
//
//===----------------------------------------------------------------------===//

#include "nsan_allocator.h"
#include "interception/interception.h"
#include "nsan.h"
#include "nsan_flags.h"
#include "nsan_platform.h"
#include "nsan_thread.h"
#include "sanitizer_common/sanitizer_allocator.h"
#include "sanitizer_common/sanitizer_allocator_checks.h"
#include "sanitizer_common/sanitizer_allocator_interface.h"
#include "sanitizer_common/sanitizer_allocator_report.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_errno.h"

using namespace __nsan;

DECLARE_REAL(void *, memcpy, void *dest, const void *src, SIZE_T n)
DECLARE_REAL(void *, memset, void *dest, int c, SIZE_T n)

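// Allocator bookkeeping: Metadata records the user-requested size of each
// live chunk in the allocator's per-chunk metadata area; a freed chunk has
// requested_size == 0.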
namespace {
struct Metadata {
  uptr requested_size;
};

struct NsanMapUnmapCallback {
  void OnMap(uptr p, uptr size) const {}
  void OnMapSecondary(uptr p, uptr size, uptr user_begin,
                      uptr user_size) const {}
  void OnUnmap(uptr p, uptr size) const {}
};

const uptr kMaxAllowedMallocSize = 1ULL << 40;

// Allocator64 parameters. Deliberately using a short name.
struct AP64 {
  static const uptr kSpaceBeg = Mapping::kHeapMemBeg;
  static const uptr kSpaceSize = 0x40000000000; // 4T.
  static const uptr kMetadataSize = sizeof(Metadata);
  using SizeClassMap = DefaultSizeClassMap;
  using MapUnmapCallback = NsanMapUnmapCallback;
  static const uptr kFlags = 0;
  using AddressSpaceView = LocalAddressSpaceView;
};
} // namespace

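// The primary allocator serves small and medium requests from size-class
// regions; CombinedAllocator transparently falls back to the mmap-based
// secondary allocator for requests the primary cannot satisfy.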
using PrimaryAllocator = SizeClassAllocator64<AP64>;
using Allocator = CombinedAllocator<PrimaryAllocator>;
using AllocatorCache = Allocator::AllocatorCache;

static Allocator allocator;
static AllocatorCache fallback_allocator_cache;
static StaticSpinMutex fallback_mutex;

static uptr max_malloc_size;

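// One-time allocator setup: honors allocator_may_return_null and the
// release-to-OS interval, and caps the largest single allocation at
// max_allocation_size_mb (when set) or kMaxAllowedMallocSize.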
void __nsan::NsanAllocatorInit() {
  SetAllocatorMayReturnNull(common_flags()->allocator_may_return_null);
  allocator.Init(common_flags()->allocator_release_to_os_interval_ms);
  if (common_flags()->max_allocation_size_mb)
    max_malloc_size = Min(common_flags()->max_allocation_size_mb << 20,
                          kMaxAllowedMallocSize);
  else
    max_malloc_size = kMaxAllowedMallocSize;
}

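// Returns the calling thread's allocator cache, which lives in opaque
// storage inside NsanThreadLocalMallocStorage.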
static AllocatorCache *GetAllocatorCache(NsanThreadLocalMallocStorage *ms) {
  CHECK_LE(sizeof(AllocatorCache), sizeof(ms->allocator_cache));
  return reinterpret_cast<AllocatorCache *>(ms->allocator_cache);
}

void NsanThreadLocalMallocStorage::Init() {
  allocator.InitCache(GetAllocatorCache(this));
}

void NsanThreadLocalMallocStorage::CommitBack() {
  allocator.SwallowCache(GetAllocatorCache(this));
  allocator.DestroyCache(GetAllocatorCache(this));
}

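// Common allocation path. After the size and RSS-limit checks, the request
// is served from the current thread's cache when one exists, or from the
// shared fallback cache (under a spin lock) during early thread setup. The
// shadow types of the new memory are reset via __nsan_set_value_unknown.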
static void *NsanAllocate(uptr size, uptr alignment, bool zero) {
  if (UNLIKELY(size > max_malloc_size)) {
    if (AllocatorMayReturnNull()) {
      Report("WARNING: NumericalStabilitySanitizer failed to allocate 0x%zx "
             "bytes\n",
             size);
      return nullptr;
    }
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportAllocationSizeTooBig(size, max_malloc_size, &stack);
  }
  if (UNLIKELY(IsRssLimitExceeded())) {
    if (AllocatorMayReturnNull())
      return nullptr;
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportRssLimitExceeded(&stack);
  }

  void *allocated;
  if (NsanThread *t = GetCurrentThread()) {
    AllocatorCache *cache = GetAllocatorCache(&t->malloc_storage());
    allocated = allocator.Allocate(cache, size, alignment);
  } else {
    SpinMutexLock l(&fallback_mutex);
    AllocatorCache *cache = &fallback_allocator_cache;
    allocated = allocator.Allocate(cache, size, alignment);
  }
  if (UNLIKELY(!allocated)) {
    SetAllocatorOutOfMemory();
    if (AllocatorMayReturnNull())
      return nullptr;
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportOutOfMemory(size, &stack);
  }
  auto *meta = reinterpret_cast<Metadata *>(allocator.GetMetaData(allocated));
  meta->requested_size = size;
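  // Only primary-allocator chunks need explicit zeroing: secondary chunks
  // come straight from mmap and are therefore already zero-filled.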
  if (zero && allocator.FromPrimary(allocated))
    REAL(memset)(allocated, 0, size);
  __nsan_set_value_unknown(allocated, size);
  RunMallocHooks(allocated, size);
  return allocated;
}

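// Common deallocation path. Under flags().poison_in_free, the chunk's shadow
// types are reset so stale floating-point shadow values cannot leak into a
// future allocation that reuses this memory.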
void __nsan::NsanDeallocate(void *p) {
  DCHECK(p);
  RunFreeHooks(p);
  auto *meta = reinterpret_cast<Metadata *>(allocator.GetMetaData(p));
  uptr size = meta->requested_size;
  meta->requested_size = 0;
  if (flags().poison_in_free)
    __nsan_set_value_unknown(p, size);
  if (NsanThread *t = GetCurrentThread()) {
    AllocatorCache *cache = GetAllocatorCache(&t->malloc_storage());
    allocator.Deallocate(cache, p);
  } else {
    // In a just-created thread, glibc's _dl_deallocate_tls can reach here
    // before nsan_current_thread is set.
    SpinMutexLock l(&fallback_mutex);
    AllocatorCache *cache = &fallback_allocator_cache;
    allocator.Deallocate(cache, p);
  }
}

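// Grows or shrinks an allocation in place when the underlying chunk is
// already large enough; otherwise allocates a new chunk and migrates both
// the bytes and their nsan shadow values before freeing the old chunk.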
static void *NsanReallocate(void *ptr, uptr new_size, uptr alignment) {
  Metadata *meta = reinterpret_cast<Metadata *>(allocator.GetMetaData(ptr));
  uptr old_size = meta->requested_size;
  uptr actually_allocated_size = allocator.GetActuallyAllocatedSize(ptr);
  if (new_size <= actually_allocated_size) {
    // We are not reallocating here.
    meta->requested_size = new_size;
    if (new_size > old_size)
      __nsan_set_value_unknown((u8 *)ptr + old_size, new_size - old_size);
    return ptr;
  }
  void *new_p = NsanAllocate(new_size, alignment, false);
  if (new_p) {
    uptr memcpy_size = Min(new_size, old_size);
    REAL(memcpy)(new_p, ptr, memcpy_size);
    __nsan_copy_values(new_p, ptr, memcpy_size);
    NsanDeallocate(ptr);
  }
  return new_p;
}

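// calloc: rejects nmemb * size overflow up front, then delegates to
// NsanAllocate with zero-initialization requested.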
static void *NsanCalloc(uptr nmemb, uptr size) {
  if (UNLIKELY(CheckForCallocOverflow(size, nmemb))) {
    if (AllocatorMayReturnNull())
      return nullptr;
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportCallocOverflow(nmemb, size, &stack);
  }
  return NsanAllocate(nmemb * size, sizeof(u64), true);
}

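// Maps an arbitrary (possibly interior) pointer to the beginning of its
// containing allocation. Returns null for pointers that do not belong to the
// heap or that point into an already-freed chunk (requested_size == 0).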
static const void *AllocationBegin(const void *p) {
  if (!p)
    return nullptr;
  void *beg = allocator.GetBlockBegin(p);
  if (!beg)
    return nullptr;
  auto *b = reinterpret_cast<Metadata *>(allocator.GetMetaData(beg));
  if (!b)
    return nullptr;
  if (b->requested_size == 0)
    return nullptr;

  return beg;
}

static uptr AllocationSizeFast(const void *p) {
  return reinterpret_cast<Metadata *>(allocator.GetMetaData(p))->requested_size;
}

static uptr AllocationSize(const void *p) {
  if (!p)
    return 0;
  if (allocator.GetBlockBegin(p) != p)
    return 0;
  return AllocationSizeFast(p);
}

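// Public malloc-family entry points, called from the nsan interceptors. Each
// sets errno on failure via SetErrnoOnNull; the scalar variants use a default
// alignment of sizeof(u64).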
void *__nsan::nsan_malloc(uptr size) {
  return SetErrnoOnNull(NsanAllocate(size, sizeof(u64), false));
}

void *__nsan::nsan_calloc(uptr nmemb, uptr size) {
  return SetErrnoOnNull(NsanCalloc(nmemb, size));
}

void *__nsan::nsan_realloc(void *ptr, uptr size) {
  if (!ptr)
    return SetErrnoOnNull(NsanAllocate(size, sizeof(u64), false));
  if (size == 0) {
    NsanDeallocate(ptr);
    return nullptr;
  }
  return SetErrnoOnNull(NsanReallocate(ptr, size, sizeof(u64)));
}

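// reallocarray: like realloc, but fails with ENOMEM when nmemb * size
// overflows, mirroring reallocarray(3) semantics.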
void *__nsan::nsan_reallocarray(void *ptr, uptr nmemb, uptr size) {
  if (UNLIKELY(CheckForCallocOverflow(size, nmemb))) {
    errno = errno_ENOMEM;
    if (AllocatorMayReturnNull())
      return nullptr;
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportReallocArrayOverflow(nmemb, size, &stack);
  }
  return nsan_realloc(ptr, nmemb * size);
}

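// Page-aligned variants: valloc returns page-aligned memory of the requested
// size; pvalloc additionally rounds the size up to a whole number of pages.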
void *__nsan::nsan_valloc(uptr size) {
  return SetErrnoOnNull(NsanAllocate(size, GetPageSizeCached(), false));
}

void *__nsan::nsan_pvalloc(uptr size) {
  uptr PageSize = GetPageSizeCached();
  if (UNLIKELY(CheckForPvallocOverflow(size, PageSize))) {
    errno = errno_ENOMEM;
    if (AllocatorMayReturnNull())
      return nullptr;
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportPvallocOverflow(size, &stack);
  }
  // pvalloc(0) should allocate one page.
  size = size ? RoundUpTo(size, PageSize) : PageSize;
  return SetErrnoOnNull(NsanAllocate(size, PageSize, false));
}

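// Explicit-alignment variants. aligned_alloc enforces the C11 requirement
// that size be a multiple of alignment; memalign only requires a power-of-two
// alignment.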
void *__nsan::nsan_aligned_alloc(uptr alignment, uptr size) {
  if (UNLIKELY(!CheckAlignedAllocAlignmentAndSize(alignment, size))) {
    errno = errno_EINVAL;
    if (AllocatorMayReturnNull())
      return nullptr;
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportInvalidAlignedAllocAlignment(size, alignment, &stack);
  }
  return SetErrnoOnNull(NsanAllocate(size, alignment, false));
}

void *__nsan::nsan_memalign(uptr alignment, uptr size) {
  if (UNLIKELY(!IsPowerOfTwo(alignment))) {
    errno = errno_EINVAL;
    if (AllocatorMayReturnNull())
      return nullptr;
    BufferedStackTrace stack;
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportInvalidAllocationAlignment(alignment, &stack);
  }
  return SetErrnoOnNull(NsanAllocate(size, alignment, false));
}

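// posix_memalign reports failure through its return value rather than errno:
// EINVAL for a bad alignment, ENOMEM when the allocation itself fails.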
int __nsan::nsan_posix_memalign(void **memptr, uptr alignment, uptr size) {
  if (UNLIKELY(!CheckPosixMemalignAlignment(alignment))) {
    if (AllocatorMayReturnNull())
      return errno_EINVAL;
    BufferedStackTrace stack;
    // Capture the stack before reporting, as on every other fatal path.
    GET_FATAL_STACK_TRACE_IF_EMPTY(&stack);
    ReportInvalidPosixMemalignAlignment(alignment, &stack);
  }
  void *ptr = NsanAllocate(size, alignment, false);
  if (UNLIKELY(!ptr))
    // OOM error is already taken care of by NsanAllocate.
    return errno_ENOMEM;
  DCHECK(IsAligned((uptr)ptr, alignment));
  *memptr = ptr;
  return 0;
}

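// Sanitizer allocator introspection interface. A client could, for instance,
// recover the extent of a live heap object (DoSomething is hypothetical):
//   if (__sanitizer_get_ownership(p))
//     DoSomething(p, __sanitizer_get_allocated_size(p));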
extern "C" {
uptr __sanitizer_get_current_allocated_bytes() {
  uptr stats[AllocatorStatCount];
  allocator.GetStats(stats);
  return stats[AllocatorStatAllocated];
}

uptr __sanitizer_get_heap_size() {
  uptr stats[AllocatorStatCount];
  allocator.GetStats(stats);
  return stats[AllocatorStatMapped];
}

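// Free and unmapped byte counts are not tracked; return nonzero placeholders,
// as other sanitizer runtimes do.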
uptr __sanitizer_get_free_bytes() { return 1; }

uptr __sanitizer_get_unmapped_bytes() { return 1; }

uptr __sanitizer_get_estimated_allocated_size(uptr size) { return size; }

int __sanitizer_get_ownership(const void *p) { return AllocationSize(p) != 0; }

const void *__sanitizer_get_allocated_begin(const void *p) {
  return AllocationBegin(p);
}

uptr __sanitizer_get_allocated_size(const void *p) { return AllocationSize(p); }

uptr __sanitizer_get_allocated_size_fast(const void *p) {
  DCHECK_EQ(p, __sanitizer_get_allocated_begin(p));
  uptr ret = AllocationSizeFast(p);
  DCHECK_EQ(ret, __sanitizer_get_allocated_size(p));
  return ret;
}

void __sanitizer_purge_allocator() { allocator.ForceReleaseToOS(); }
}