xref: /llvm-project/compiler-rt/lib/asan/asan_poisoning.cpp (revision c76045d9bf3bd1c7a381dc85d1db63a38fd69aa4)
1217222abSNico Weber //===-- asan_poisoning.cpp ------------------------------------------------===//
2217222abSNico Weber //
3217222abSNico Weber // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4217222abSNico Weber // See https://llvm.org/LICENSE.txt for license information.
5217222abSNico Weber // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6217222abSNico Weber //
7217222abSNico Weber //===----------------------------------------------------------------------===//
8217222abSNico Weber //
9217222abSNico Weber // This file is a part of AddressSanitizer, an address sanity checker.
10217222abSNico Weber //
11217222abSNico Weber // Shadow memory poisoning by ASan RTL and by user application.
12217222abSNico Weber //===----------------------------------------------------------------------===//
13217222abSNico Weber 
14217222abSNico Weber #include "asan_poisoning.h"
15595d340dSDmitry Vyukov 
16217222abSNico Weber #include "asan_report.h"
17217222abSNico Weber #include "asan_stack.h"
18217222abSNico Weber #include "sanitizer_common/sanitizer_atomic.h"
19*c76045d9STacet #include "sanitizer_common/sanitizer_common.h"
20217222abSNico Weber #include "sanitizer_common/sanitizer_flags.h"
21595d340dSDmitry Vyukov #include "sanitizer_common/sanitizer_interface_internal.h"
22595d340dSDmitry Vyukov #include "sanitizer_common/sanitizer_libc.h"
23217222abSNico Weber 
24217222abSNico Weber namespace __asan {
25217222abSNico Weber 
// Global switch: while false, requests to poison shadow (value != 0) are
// ignored; unpoisoning is always allowed.  Uses release/acquire ordering so
// that a thread observing the flag also observes writes made before it was
// set.
static atomic_uint8_t can_poison_memory;

// Enables or disables shadow poisoning (e.g. while the runtime is not yet
// fully initialized).
void SetCanPoisonMemory(bool value) {
  atomic_store(&can_poison_memory, value, memory_order_release);
}

// Returns true if the runtime is currently allowed to poison shadow memory.
bool CanPoisonMemory() {
  return atomic_load(&can_poison_memory, memory_order_acquire);
}
35217222abSNico Weber 
// Poisons (value != 0) or unpoisons (value == 0) the shadow of the
// application region [addr, addr + size).  Both endpoints must be
// granularity-aligned and lie in application memory.  Poisoning requests are
// dropped while poisoning is globally disabled; unpoisoning always proceeds.
void PoisonShadow(uptr addr, uptr size, u8 value) {
  if (value && !CanPoisonMemory()) return;
  CHECK(AddrIsAlignedByGranularity(addr));
  CHECK(AddrIsInMem(addr));
  CHECK(AddrIsAlignedByGranularity(addr + size));
  // addr + size is one-past-the-end; step back one granule so the range
  // check stays inside the region.
  CHECK(AddrIsInMem(addr + size - ASAN_SHADOW_GRANULARITY));
  CHECK(REAL(memset));  // memset interceptor must be resolved by now.
  FastPoisonShadow(addr, size, value);
}
45217222abSNico Weber 
// Poisons the right redzone of a chunk at `addr`: the first `size` bytes stay
// addressable and the rest of [addr, addr + redzone_size) gets shadow value
// `value` (the granule straddling the boundary becomes a partial shadow
// byte).  No-op while poisoning is globally disabled.
void PoisonShadowPartialRightRedzone(uptr addr,
                                     uptr size,
                                     uptr redzone_size,
                                     u8 value) {
  if (!CanPoisonMemory()) return;
  CHECK(AddrIsAlignedByGranularity(addr));
  CHECK(AddrIsInMem(addr));
  FastPoisonShadowPartialRightRedzone(addr, size, redzone_size, value);
}
55217222abSNico Weber 
// Describes one endpoint of a user memory region in shadow terms: the shadow
// byte ("chunk") covering the address, the address's offset within its
// granule, and a snapshot of the shadow value taken at construction time.
struct ShadowSegmentEndpoint {
  u8 *chunk;
  s8 offset;  // in [0, ASAN_SHADOW_GRANULARITY)
  s8 value;  // = *chunk;

  explicit ShadowSegmentEndpoint(uptr address) {
    chunk = (u8*)MemToShadow(address);
    offset = address & (ASAN_SHADOW_GRANULARITY - 1);
    value = *chunk;
  }
};
67217222abSNico Weber 
// Poisons (or unpoisons) the shadow of the intra-object redzone
// [ptr, ptr + size).  The right end must be granularity-aligned; an
// unaligned left end is encoded as a partial shadow byte.  Size is capped at
// 4096 as a sanity bound.
void AsanPoisonOrUnpoisonIntraObjectRedzone(uptr ptr, uptr size, bool poison) {
  uptr end = ptr + size;
  if (Verbosity()) {
    Printf("__asan_%spoison_intra_object_redzone [%p,%p) %zd\n",
           poison ? "" : "un", (void *)ptr, (void *)end, size);
    if (Verbosity() >= 2)
      PRINT_CURRENT_STACK();
  }
  CHECK(size);
  CHECK_LE(size, 4096);
  CHECK(IsAligned(end, ASAN_SHADOW_GRANULARITY));
  if (!IsAligned(ptr, ASAN_SHADOW_GRANULARITY)) {
    // Partial first granule: the shadow byte holds the count of addressable
    // prefix bytes (the part of the granule before the redzone); 0 means the
    // whole granule is addressable again.
    *(u8 *)MemToShadow(ptr) =
        poison ? static_cast<u8>(ptr % ASAN_SHADOW_GRANULARITY) : 0;
    // Advance ptr to the next granule boundary.
    ptr |= ASAN_SHADOW_GRANULARITY - 1;
    ptr++;
  }
  // Remaining granules are fully inside the redzone.
  for (; ptr < end; ptr += ASAN_SHADOW_GRANULARITY)
    *(u8*)MemToShadow(ptr) = poison ? kAsanIntraObjectRedzone : 0;
}
88217222abSNico Weber 
89217222abSNico Weber }  // namespace __asan
90217222abSNico Weber 
91217222abSNico Weber // ---------------------- Interface ---------------- {{{1
92c0fa6322SVitaly Buka using namespace __asan;
93217222abSNico Weber 
94217222abSNico Weber // Current implementation of __asan_(un)poison_memory_region doesn't check
95217222abSNico Weber // that user program (un)poisons the memory it owns. It poisons memory
96217222abSNico Weber // conservatively, and unpoisons progressively to make sure asan shadow
97217222abSNico Weber // mapping invariant is preserved (see detailed mapping description here:
98217222abSNico Weber // https://github.com/google/sanitizers/wiki/AddressSanitizerAlgorithm).
99217222abSNico Weber //
100217222abSNico Weber // * if user asks to poison region [left, right), the program poisons
101217222abSNico Weber // at least [left, AlignDown(right)).
102217222abSNico Weber // * if user asks to unpoison region [left, right), the program unpoisons
103217222abSNico Weber // at most [AlignDown(left), right).
// User-facing poisoning entry point; see the comment above for the
// conservative-poisoning contract ([left, AlignDown(right)) at least).
void __asan_poison_memory_region(void const volatile *addr, uptr size) {
  if (!flags()->allow_user_poisoning || size == 0) return;
  uptr beg_addr = (uptr)addr;
  uptr end_addr = beg_addr + size;
  VPrintf(3, "Trying to poison memory region [%p, %p)\n", (void *)beg_addr,
          (void *)end_addr);
  ShadowSegmentEndpoint beg(beg_addr);
  ShadowSegmentEndpoint end(end_addr);
  // Case 1: both endpoints fall into the same shadow byte.
  if (beg.chunk == end.chunk) {
    CHECK_LT(beg.offset, end.offset);
    s8 value = beg.value;
    CHECK_EQ(value, end.value);
    // We can only poison memory if the byte in end.offset is unaddressable.
    // No need to re-poison memory if it is poisoned already.
    if (value > 0 && value <= end.offset) {
      if (beg.offset > 0) {
        // Keep the addressable prefix before beg.offset.
        *beg.chunk = Min(value, beg.offset);
      } else {
        *beg.chunk = kAsanUserPoisonedMemoryMagic;
      }
    }
    return;
  }
  // Case 2: endpoints are in different shadow bytes.
  CHECK_LT(beg.chunk, end.chunk);
  if (beg.offset > 0) {
    // Mark bytes from beg.offset as unaddressable.
    if (beg.value == 0) {
      *beg.chunk = beg.offset;
    } else {
      *beg.chunk = Min(beg.value, beg.offset);
    }
    beg.chunk++;
  }
  // Fully poison every granule strictly between the two endpoints.
  REAL(memset)(beg.chunk, kAsanUserPoisonedMemoryMagic, end.chunk - beg.chunk);
  // Poison if byte in end.offset is unaddressable.
  if (end.value > 0 && end.value <= end.offset) {
    *end.chunk = kAsanUserPoisonedMemoryMagic;
  }
}
143217222abSNico Weber 
// User-facing unpoisoning entry point; see the comment above
// __asan_poison_memory_region for the progressive-unpoisoning contract
// ([AlignDown(left), right) at most).
void __asan_unpoison_memory_region(void const volatile *addr, uptr size) {
  if (!flags()->allow_user_poisoning || size == 0) return;
  uptr beg_addr = (uptr)addr;
  uptr end_addr = beg_addr + size;
  VPrintf(3, "Trying to unpoison memory region [%p, %p)\n", (void *)beg_addr,
          (void *)end_addr);
  ShadowSegmentEndpoint beg(beg_addr);
  ShadowSegmentEndpoint end(end_addr);
  // Case 1: both endpoints fall into the same shadow byte.
  if (beg.chunk == end.chunk) {
    CHECK_LT(beg.offset, end.offset);
    s8 value = beg.value;
    CHECK_EQ(value, end.value);
    // We unpoison memory bytes up to end.offset if it is not
    // unpoisoned already.
    if (value != 0) {
      *beg.chunk = Max(value, end.offset);
    }
    return;
  }
  // Case 2: endpoints are in different shadow bytes.  Unpoison every granule
  // up to (not including) end.chunk wholesale.
  CHECK_LT(beg.chunk, end.chunk);
  REAL(memset)(beg.chunk, 0, end.chunk - beg.chunk);
  // Partial last granule: make bytes up to end.offset addressable while
  // keeping the rest of the granule poisoned (unless it was already fully
  // addressable).
  if (end.offset > 0 && end.value != 0) {
    *end.chunk = Max(end.value, end.offset);
  }
}
169217222abSNico Weber 
170217222abSNico Weber int __asan_address_is_poisoned(void const volatile *addr) {
171217222abSNico Weber   return __asan::AddressIsPoisoned((uptr)addr);
172217222abSNico Weber }
173217222abSNico Weber 
// Returns the address of the first poisoned byte in [beg, beg + size), or 0
// if the whole region is addressable.  Endpoints outside application memory
// are reported as poisoned.
uptr __asan_region_is_poisoned(uptr beg, uptr size) {
  if (!size)
    return 0;
  uptr end = beg + size;
  if (!AddrIsInMem(beg))
    return beg;
  if (!AddrIsInMem(end))
    return end;
  CHECK_LT(beg, end);
  uptr aligned_b = RoundUpTo(beg, ASAN_SHADOW_GRANULARITY);
  uptr aligned_e = RoundDownTo(end, ASAN_SHADOW_GRANULARITY);
  uptr shadow_beg = MemToShadow(aligned_b);
  uptr shadow_end = MemToShadow(aligned_e);
  // First check the first and the last application bytes,
  // then check the ASAN_SHADOW_GRANULARITY-aligned region by calling
  // mem_is_zero on the corresponding shadow.
  if (!__asan::AddressIsPoisoned(beg) && !__asan::AddressIsPoisoned(end - 1) &&
      (shadow_end <= shadow_beg ||
       __sanitizer::mem_is_zero((const char *)shadow_beg,
                                shadow_end - shadow_beg)))
    return 0;
  // The fast check failed, so we have a poisoned byte somewhere.
  // Find it slowly.
  for (; beg < end; beg++)
    if (__asan::AddressIsPoisoned(beg))
      return beg;
  UNREACHABLE("mem_is_zero returned false, but poisoned byte was not found");
  return 0;
}
203217222abSNico Weber 
// Reports an ASan error if any byte of the small region [p, p + size) is
// poisoned.  Fast path checks only the first and last bytes;
// __asan_region_is_poisoned then pinpoints the exact bad address for the
// report.
#define CHECK_SMALL_REGION(p, size, isWrite)                  \
  do {                                                        \
    uptr __p = reinterpret_cast<uptr>(p);                     \
    uptr __size = size;                                       \
    if (UNLIKELY(__asan::AddressIsPoisoned(__p) ||            \
        __asan::AddressIsPoisoned(__p + __size - 1))) {       \
      GET_CURRENT_PC_BP_SP;                                   \
      uptr __bad = __asan_region_is_poisoned(__p, __size);    \
      __asan_report_error(pc, bp, sp, __bad, isWrite, __size, 0);\
    }                                                         \
  } while (false)
215217222abSNico Weber 
216217222abSNico Weber 
// Checked unaligned loads/stores.  Each function verifies that every byte of
// the access is addressable (CHECK_SMALL_REGION reports an ASan error
// otherwise) and then performs the access through the corresponding
// unaligned ("uu") integer type.
extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u16 __sanitizer_unaligned_load16(const uu16 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u32 __sanitizer_unaligned_load32(const uu32 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
u64 __sanitizer_unaligned_load64(const uu64 *p) {
  CHECK_SMALL_REGION(p, sizeof(*p), false);
  return *p;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store16(uu16 *p, u16 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store32(uu32 *p, u32 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}

extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  CHECK_SMALL_REGION(p, sizeof(*p), true);
  *p = x;
}
252217222abSNico Weber 
// Poisons the shadow of a C++ array cookie (the element count stored before
// an array of objects with destructors) at address `p`, so stray accesses to
// it are reported.  Enabled on 64-bit targets only, gated by the
// poison_array_cookie flag.
extern "C" SANITIZER_INTERFACE_ATTRIBUTE
void __asan_poison_cxx_array_cookie(uptr p) {
  if (SANITIZER_WORDSIZE != 64) return;
  if (!flags()->poison_array_cookie) return;
  uptr s = MEM_TO_SHADOW(p);
  *reinterpret_cast<u8*>(s) = kAsanArrayCookieMagic;
}
260217222abSNico Weber 
// Loads a C++ array cookie, consulting its shadow first.  Returns the stored
// count normally, but returns 0 when the cookie's memory was freed, to break
// the destructor loop that would otherwise run over freed memory.
extern "C" SANITIZER_INTERFACE_ATTRIBUTE
uptr __asan_load_cxx_array_cookie(uptr *p) {
  if (SANITIZER_WORDSIZE != 64) return *p;
  if (!flags()->poison_array_cookie) return *p;
  uptr s = MEM_TO_SHADOW(reinterpret_cast<uptr>(p));
  u8 sval = *reinterpret_cast<u8*>(s);
  if (sval == kAsanArrayCookieMagic) return *p;
  // If sval is not kAsanArrayCookieMagic it can only be freed memory,
  // which means that we are going to get double-free. So, return 0 to avoid
  // infinite loop of destructors. We don't want to report a double-free here
  // though, so print a warning just in case.
  // CHECK_EQ(sval, kAsanHeapFreeMagic);
  if (sval == kAsanHeapFreeMagic) {
    Report("AddressSanitizer: loaded array cookie from free-d memory; "
           "expect a double-free report\n");
    return 0;
  }
  // The cookie may remain unpoisoned if e.g. it comes from a custom
  // operator new defined inside a class.
  return *p;
}
282217222abSNico Weber 
283217222abSNico Weber // This is a simplified version of __asan_(un)poison_memory_region, which
284217222abSNico Weber // assumes that left border of region to be poisoned is properly aligned.
// Poisons/unpoisons [addr, addr + size) for use-after-scope detection.
// `addr` must be granularity-aligned (see the comment above); the unaligned
// tail shares its shadow byte with neighboring memory and is updated only
// when that is safe.
static void PoisonAlignedStackMemory(uptr addr, uptr size, bool do_poison) {
  if (size == 0) return;
  uptr aligned_size = size & ~(ASAN_SHADOW_GRANULARITY - 1);
  PoisonShadow(addr, aligned_size,
               do_poison ? kAsanStackUseAfterScopeMagic : 0);
  if (size == aligned_size)
    return;
  // Handle the partial last granule [addr + aligned_size, addr + size).
  s8 end_offset = (s8)(size - aligned_size);
  s8* shadow_end = (s8*)MemToShadow(addr + aligned_size);
  s8 end_value = *shadow_end;
  if (do_poison) {
    // If possible, mark all the bytes mapping to last shadow byte as
    // unaddressable.
    if (end_value > 0 && end_value <= end_offset)
      *shadow_end = (s8)kAsanStackUseAfterScopeMagic;
  } else {
    // If necessary, mark few first bytes mapping to last shadow byte
    // as addressable
    if (end_value != 0)
      *shadow_end = Max(end_value, end_offset);
  }
}
307217222abSNico Weber 
// __asan_set_shadow_XX: fill a range of *shadow* memory (`addr` is already a
// shadow address) with the shadow value 0xXX.  Called by compiler-generated
// code that knows the shadow layout, e.g. when setting up or tearing down
// stack frame redzones.
void __asan_set_shadow_00(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0, size);
}

// Values 0x01..0x07 mark partially-addressable granules (N addressable
// bytes).
void __asan_set_shadow_01(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x01, size);
}

void __asan_set_shadow_02(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x02, size);
}

void __asan_set_shadow_03(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x03, size);
}

void __asan_set_shadow_04(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x04, size);
}

void __asan_set_shadow_05(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x05, size);
}

void __asan_set_shadow_06(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x06, size);
}

void __asan_set_shadow_07(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0x07, size);
}

// 0xf1/0xf2/0xf3/0xf5/0xf8 are the stack redzone / use-after-scope magic
// values (see asan_internal.h for the exact constants).
void __asan_set_shadow_f1(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf1, size);
}

void __asan_set_shadow_f2(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf2, size);
}

void __asan_set_shadow_f3(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf3, size);
}

void __asan_set_shadow_f5(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf5, size);
}

void __asan_set_shadow_f8(uptr addr, uptr size) {
  REAL(memset)((void *)addr, 0xf8, size);
}
359217222abSNico Weber 
// Interface entry points for use-after-scope poisoning of stack memory.
// `addr` is expected to be granularity-aligned (enforced inside
// PoisonAlignedStackMemory via PoisonShadow's checks).
void __asan_poison_stack_memory(uptr addr, uptr size) {
  VReport(1, "poisoning: %p %zx\n", (void *)addr, size);
  PoisonAlignedStackMemory(addr, size, true);
}

// Reverses __asan_poison_stack_memory for the same region.
void __asan_unpoison_stack_memory(uptr addr, uptr size) {
  VReport(1, "unpoisoning: %p %zx\n", (void *)addr, size);
  PoisonAlignedStackMemory(addr, size, false);
}
369217222abSNico Weber 
// Clamps the old/new container bounds (in place) when the container storage
// itself is not granularity-aligned, so callers can afterwards reason in
// whole granules:
//  * If the storage end is mid-granule and the byte right after the storage
//    is addressable, that last granule cannot be poisoned at all (we cannot
//    poison a granule prefix), so all bounds are clamped down past it.
//  * If the storage begin is mid-granule and the container is vacating that
//    first granule, the granule is poisoned here (keeping the prefix before
//    the storage addressable) and all bounds are clamped up past it.
static void FixUnalignedStorage(uptr storage_beg, uptr storage_end,
                                uptr &old_beg, uptr &old_end, uptr &new_beg,
                                uptr &new_end) {
  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
  if (UNLIKELY(!AddrIsAlignedByGranularity(storage_end))) {
    uptr end_down = RoundDownTo(storage_end, granularity);
    // Ignore the last unaligned granule if the storage is followed by an
    // unpoisoned byte, because we can't poison the prefix anyway.  Don't
    // call AddressIsPoisoned at all if the container change does not affect
    // the last granule.
    if ((((old_end != new_end) && Max(old_end, new_end) > end_down) ||
         ((old_beg != new_beg) && Max(old_beg, new_beg) > end_down)) &&
        !AddressIsPoisoned(storage_end)) {
      old_beg = Min(end_down, old_beg);
      old_end = Min(end_down, old_end);
      new_beg = Min(end_down, new_beg);
      new_end = Min(end_down, new_end);
    }
  }

  // Handle misaligned begin and cut it off.
  if (UNLIKELY(!AddrIsAlignedByGranularity(storage_beg))) {
    uptr beg_up = RoundUpTo(storage_beg, granularity);
    // The first unaligned granule needs special handling only if we had bytes
    // there before and will have none after.
    if ((new_beg == new_end || new_beg >= beg_up) && old_beg != old_end &&
        old_beg < beg_up) {
      // Keep granule prefix outside of the storage unpoisoned.
      uptr beg_down = RoundDownTo(storage_beg, granularity);
      *(u8 *)MemToShadow(beg_down) = storage_beg - beg_down;
      old_beg = Max(beg_up, old_beg);
      old_end = Max(beg_up, old_end);
      new_beg = Max(beg_up, new_beg);
      new_end = Max(beg_up, new_end);
    }
  }
}
4078db7768aSVitaly Buka 
// Annotates a contiguous container (e.g. std::vector) whose storage is
// [beg_p, end_p): the in-use part previously ended at old_mid_p and now ends
// at new_mid_p.  Shadow between the two mid points is (un)poisoned so that
// accesses past the logical end are reported.
void __sanitizer_annotate_contiguous_container(const void *beg_p,
                                               const void *end_p,
                                               const void *old_mid_p,
                                               const void *new_mid_p) {
  if (!flags()->detect_container_overflow)
    return;
  VPrintf(3, "contiguous_container: %p %p %p %p\n", beg_p, end_p, old_mid_p,
          new_mid_p);
  uptr storage_beg = reinterpret_cast<uptr>(beg_p);
  uptr storage_end = reinterpret_cast<uptr>(end_p);
  uptr old_end = reinterpret_cast<uptr>(old_mid_p);
  uptr new_end = reinterpret_cast<uptr>(new_mid_p);
  // A contiguous container always starts at the storage begin.
  uptr old_beg = storage_beg;
  uptr new_beg = storage_beg;
  uptr granularity = ASAN_SHADOW_GRANULARITY;
  if (!(storage_beg <= old_end && storage_beg <= new_end &&
        old_end <= storage_end && new_end <= storage_end)) {
    GET_STACK_TRACE_FATAL_HERE;
    ReportBadParamsToAnnotateContiguousContainer(storage_beg, storage_end,
                                                 old_end, new_end, &stack);
  }
  CHECK_LE(storage_end - storage_beg,
           FIRST_32_SECOND_64(1UL << 30, 1ULL << 40));  // Sanity check.

  if (old_end == new_end)
    return;  // Nothing to do here.

  FixUnalignedStorage(storage_beg, storage_end, old_beg, old_end, new_beg,
                      new_end);

  uptr a = RoundDownTo(Min(old_end, new_end), granularity);
  uptr c = RoundUpTo(Max(old_end, new_end), granularity);
  uptr d1 = RoundDownTo(old_end, granularity);
  // uptr d2 = RoundUpTo(old_end, granularity);
  // Currently we should be in this state:
  // [a, d1) is good, [d2, c) is bad, [d1, d2) is partially good.
  // Make a quick sanity check that we are indeed in this state.
  //
  // FIXME: Two of these three checks are disabled until we fix
  // https://github.com/google/sanitizers/issues/258.
  // if (d1 != d2)
  //  DCHECK_EQ(*(u8*)MemToShadow(d1), old_end - d1);
  //
  // NOTE: curly brackets for the "if" below to silence a MSVC warning.
  if (a + granularity <= d1) {
    DCHECK_EQ(*(u8 *)MemToShadow(a), 0);
  }
  // if (d2 + granularity <= c && c <= end)
  //   DCHECK_EQ(*(u8 *)MemToShadow(c - granularity),
  //            kAsanContiguousContainerOOBMagic);

  uptr b1 = RoundDownTo(new_end, granularity);
  uptr b2 = RoundUpTo(new_end, granularity);
  // New state:
  // [a, b1) is good, [b2, c) is bad, [b1, b2) is partially good.
  // The container either grew (unpoison [a, b1)) or shrank (poison [b2, c));
  // exactly one of the branches applies since old_end != new_end.
  if (b1 > a)
    PoisonShadow(a, b1 - a, 0);
  else if (c > b2)
    PoisonShadow(b2, c - b2, kAsanContiguousContainerOOBMagic);
  if (b1 != b2) {
    CHECK_EQ(b2 - b1, granularity);
    // Encode the partially-used last granule: new_end - b1 addressable bytes.
    *(u8 *)MemToShadow(b1) = static_cast<u8>(new_end - b1);
  }
}
472217222abSNico Weber 
4731c5ad6d2SAdvenam Tacet // Annotates a double ended contiguous memory area like std::deque's chunk.
// It allows detecting buggy accesses to allocated but not used beginning
4751c5ad6d2SAdvenam Tacet // or end items of such a container.
4761c5ad6d2SAdvenam Tacet void __sanitizer_annotate_double_ended_contiguous_container(
4771c5ad6d2SAdvenam Tacet     const void *storage_beg_p, const void *storage_end_p,
4781c5ad6d2SAdvenam Tacet     const void *old_container_beg_p, const void *old_container_end_p,
4791c5ad6d2SAdvenam Tacet     const void *new_container_beg_p, const void *new_container_end_p) {
4801c5ad6d2SAdvenam Tacet   if (!flags()->detect_container_overflow)
4811c5ad6d2SAdvenam Tacet     return;
4821c5ad6d2SAdvenam Tacet 
4833c0a4f26SVitaly Buka   VPrintf(3, "contiguous_container: %p %p %p %p %p %p\n", storage_beg_p,
4841c5ad6d2SAdvenam Tacet           storage_end_p, old_container_beg_p, old_container_end_p,
4851c5ad6d2SAdvenam Tacet           new_container_beg_p, new_container_end_p);
4861c5ad6d2SAdvenam Tacet 
4871c5ad6d2SAdvenam Tacet   uptr storage_beg = reinterpret_cast<uptr>(storage_beg_p);
4881c5ad6d2SAdvenam Tacet   uptr storage_end = reinterpret_cast<uptr>(storage_end_p);
4891c5ad6d2SAdvenam Tacet   uptr old_beg = reinterpret_cast<uptr>(old_container_beg_p);
4901c5ad6d2SAdvenam Tacet   uptr old_end = reinterpret_cast<uptr>(old_container_end_p);
4911c5ad6d2SAdvenam Tacet   uptr new_beg = reinterpret_cast<uptr>(new_container_beg_p);
4921c5ad6d2SAdvenam Tacet   uptr new_end = reinterpret_cast<uptr>(new_container_end_p);
4931c5ad6d2SAdvenam Tacet 
4941c5ad6d2SAdvenam Tacet   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
4951c5ad6d2SAdvenam Tacet 
4960d07922dSVitaly Buka   if (!(old_beg <= old_end && new_beg <= new_end) ||
4970d07922dSVitaly Buka       !(storage_beg <= new_beg && new_end <= storage_end) ||
4980d07922dSVitaly Buka       !(storage_beg <= old_beg && old_end <= storage_end)) {
4991c5ad6d2SAdvenam Tacet     GET_STACK_TRACE_FATAL_HERE;
5001c5ad6d2SAdvenam Tacet     ReportBadParamsToAnnotateDoubleEndedContiguousContainer(
5011c5ad6d2SAdvenam Tacet         storage_beg, storage_end, old_beg, old_end, new_beg, new_end, &stack);
5021c5ad6d2SAdvenam Tacet   }
503450ec770SVitaly Buka   CHECK_LE(storage_end - storage_beg,
504450ec770SVitaly Buka            FIRST_32_SECOND_64(1UL << 30, 1ULL << 40));  // Sanity check.
505450ec770SVitaly Buka 
506450ec770SVitaly Buka   if ((old_beg == old_end && new_beg == new_end) ||
507450ec770SVitaly Buka       (old_beg == new_beg && old_end == new_end))
508450ec770SVitaly Buka     return;  // Nothing to do here.
5091c5ad6d2SAdvenam Tacet 
510e1657e32SVitaly Buka   FixUnalignedStorage(storage_beg, storage_end, old_beg, old_end, new_beg,
511e1657e32SVitaly Buka                       new_end);
5121c5ad6d2SAdvenam Tacet 
  // Handle non-intersecting new/old containers separately, to keep the
  // intersecting case below simpler.
5152b026fedSVitaly Buka   if (old_beg == old_end || new_beg == new_end || new_end <= old_beg ||
5162b026fedSVitaly Buka       old_end <= new_beg) {
5172b026fedSVitaly Buka     if (old_beg != old_end) {
5182b026fedSVitaly Buka       // Poisoning the old container.
5191c5ad6d2SAdvenam Tacet       uptr a = RoundDownTo(old_beg, granularity);
5201c5ad6d2SAdvenam Tacet       uptr b = RoundUpTo(old_end, granularity);
5211c5ad6d2SAdvenam Tacet       PoisonShadow(a, b - a, kAsanContiguousContainerOOBMagic);
5221c5ad6d2SAdvenam Tacet     }
5231c5ad6d2SAdvenam Tacet 
5242b026fedSVitaly Buka     if (new_beg != new_end) {
5252b026fedSVitaly Buka       // Unpoisoning the new container.
5262b026fedSVitaly Buka       uptr a = RoundDownTo(new_beg, granularity);
5272b026fedSVitaly Buka       uptr b = RoundDownTo(new_end, granularity);
5282b026fedSVitaly Buka       PoisonShadow(a, b - a, 0);
5292b026fedSVitaly Buka       if (!AddrIsAlignedByGranularity(new_end))
5302b026fedSVitaly Buka         *(u8 *)MemToShadow(b) = static_cast<u8>(new_end - b);
5312b026fedSVitaly Buka     }
5322b026fedSVitaly Buka 
5332b026fedSVitaly Buka     return;
5342b026fedSVitaly Buka   }
5352b026fedSVitaly Buka 
5362b026fedSVitaly Buka   // Intersection of old and new containers is not empty.
5372b026fedSVitaly Buka   CHECK_LT(new_beg, old_end);
5382b026fedSVitaly Buka   CHECK_GT(new_end, old_beg);
5392b026fedSVitaly Buka 
5402b026fedSVitaly Buka   if (new_beg < old_beg) {
541cc278601SVitaly Buka     // Round down because we can't poison prefixes.
5421c5ad6d2SAdvenam Tacet     uptr a = RoundDownTo(new_beg, granularity);
543cc278601SVitaly Buka     // Round down and ignore the [c, old_beg) as its state defined by unchanged
544cc278601SVitaly Buka     // [old_beg, old_end).
5451c5ad6d2SAdvenam Tacet     uptr c = RoundDownTo(old_beg, granularity);
5461c5ad6d2SAdvenam Tacet     PoisonShadow(a, c - a, 0);
5472b026fedSVitaly Buka   } else if (new_beg > old_beg) {
548cc278601SVitaly Buka     // Round down and poison [a, old_beg) because it was unpoisoned only as a
549cc278601SVitaly Buka     // prefix.
5501c5ad6d2SAdvenam Tacet     uptr a = RoundDownTo(old_beg, granularity);
551cc278601SVitaly Buka     // Round down and ignore the [c, new_beg) as its state defined by unchanged
552cc278601SVitaly Buka     // [new_beg, old_end).
5531c5ad6d2SAdvenam Tacet     uptr c = RoundDownTo(new_beg, granularity);
554cc278601SVitaly Buka 
5551c5ad6d2SAdvenam Tacet     PoisonShadow(a, c - a, kAsanContiguousContainerOOBMagic);
5561c5ad6d2SAdvenam Tacet   }
5571c5ad6d2SAdvenam Tacet 
5582b026fedSVitaly Buka   if (new_end > old_end) {
559cc278601SVitaly Buka     // Round down to poison the prefix.
5601c5ad6d2SAdvenam Tacet     uptr a = RoundDownTo(old_end, granularity);
561cc278601SVitaly Buka     // Round down and handle remainder below.
5621c5ad6d2SAdvenam Tacet     uptr c = RoundDownTo(new_end, granularity);
5631c5ad6d2SAdvenam Tacet     PoisonShadow(a, c - a, 0);
564cc278601SVitaly Buka     if (!AddrIsAlignedByGranularity(new_end))
5651c5ad6d2SAdvenam Tacet       *(u8 *)MemToShadow(c) = static_cast<u8>(new_end - c);
5662b026fedSVitaly Buka   } else if (new_end < old_end) {
567cc278601SVitaly Buka     // Round up and handle remained below.
5681c5ad6d2SAdvenam Tacet     uptr a2 = RoundUpTo(new_end, granularity);
569cc278601SVitaly Buka     // Round up to poison entire granule as we had nothing in [old_end, c2).
5701c5ad6d2SAdvenam Tacet     uptr c2 = RoundUpTo(old_end, granularity);
5711c5ad6d2SAdvenam Tacet     PoisonShadow(a2, c2 - a2, kAsanContiguousContainerOOBMagic);
572cc278601SVitaly Buka 
5732b026fedSVitaly Buka     if (!AddrIsAlignedByGranularity(new_end)) {
5742b026fedSVitaly Buka       uptr a = RoundDownTo(new_end, granularity);
5751c5ad6d2SAdvenam Tacet       *(u8 *)MemToShadow(a) = static_cast<u8>(new_end - a);
5761c5ad6d2SAdvenam Tacet     }
5771c5ad6d2SAdvenam Tacet   }
5781c5ad6d2SAdvenam Tacet }
5791c5ad6d2SAdvenam Tacet 
580*c76045d9STacet // Marks the specified number of bytes in a granule as accessible or
581*c76045d9STacet // poisones the whole granule with kAsanContiguousContainerOOBMagic value.
582*c76045d9STacet static void SetContainerGranule(uptr ptr, u8 n) {
583*c76045d9STacet   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
584*c76045d9STacet   u8 s = (n == granularity) ? 0 : (n ? n : kAsanContiguousContainerOOBMagic);
585*c76045d9STacet   *(u8 *)MemToShadow(ptr) = s;
586*c76045d9STacet }
587*c76045d9STacet 
588*c76045d9STacet // Performs a byte-by-byte copy of ASan annotations (shadow memory values).
589*c76045d9STacet // Result may be different due to ASan limitations, but result cannot lead
590*c76045d9STacet // to false positives (more memory than requested may get unpoisoned).
591*c76045d9STacet static void SlowCopyContainerAnnotations(uptr src_beg, uptr src_end,
592*c76045d9STacet                                          uptr dst_beg, uptr dst_end) {
593*c76045d9STacet   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
594*c76045d9STacet   uptr dst_end_down = RoundDownTo(dst_end, granularity);
595*c76045d9STacet   uptr src_ptr = src_beg;
596*c76045d9STacet   uptr dst_ptr = dst_beg;
597*c76045d9STacet 
598*c76045d9STacet   while (dst_ptr < dst_end) {
599*c76045d9STacet     uptr granule_beg = RoundDownTo(dst_ptr, granularity);
600*c76045d9STacet     uptr granule_end = granule_beg + granularity;
601*c76045d9STacet     uptr unpoisoned_bytes = 0;
602*c76045d9STacet 
603*c76045d9STacet     uptr end = Min(granule_end, dst_end);
604*c76045d9STacet     for (; dst_ptr != end; ++dst_ptr, ++src_ptr)
605*c76045d9STacet       if (!AddressIsPoisoned(src_ptr))
606*c76045d9STacet         unpoisoned_bytes = dst_ptr - granule_beg + 1;
607*c76045d9STacet 
608*c76045d9STacet     if (dst_ptr == dst_end && dst_end != dst_end_down &&
609*c76045d9STacet         !AddressIsPoisoned(dst_end))
610*c76045d9STacet       continue;
611*c76045d9STacet 
612*c76045d9STacet     if (unpoisoned_bytes != 0 || granule_beg >= dst_beg)
613*c76045d9STacet       SetContainerGranule(granule_beg, unpoisoned_bytes);
614*c76045d9STacet     else if (!AddressIsPoisoned(dst_beg))
615*c76045d9STacet       SetContainerGranule(granule_beg, dst_beg - granule_beg);
616*c76045d9STacet   }
617*c76045d9STacet }
618*c76045d9STacet 
619*c76045d9STacet // Performs a byte-by-byte copy of ASan annotations (shadow memory values),
620*c76045d9STacet // going through bytes in reversed order, but not reversing annotations.
621*c76045d9STacet // Result may be different due to ASan limitations, but result cannot lead
622*c76045d9STacet // to false positives (more memory than requested may get unpoisoned).
623*c76045d9STacet static void SlowReversedCopyContainerAnnotations(uptr src_beg, uptr src_end,
624*c76045d9STacet                                                  uptr dst_beg, uptr dst_end) {
625*c76045d9STacet   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
626*c76045d9STacet   uptr dst_end_down = RoundDownTo(dst_end, granularity);
627*c76045d9STacet   uptr src_ptr = src_end;
628*c76045d9STacet   uptr dst_ptr = dst_end;
629*c76045d9STacet 
630*c76045d9STacet   while (dst_ptr > dst_beg) {
631*c76045d9STacet     uptr granule_beg = RoundDownTo(dst_ptr - 1, granularity);
632*c76045d9STacet     uptr unpoisoned_bytes = 0;
633*c76045d9STacet 
634*c76045d9STacet     uptr end = Max(granule_beg, dst_beg);
635*c76045d9STacet     for (; dst_ptr != end; --dst_ptr, --src_ptr)
636*c76045d9STacet       if (unpoisoned_bytes == 0 && !AddressIsPoisoned(src_ptr - 1))
637*c76045d9STacet         unpoisoned_bytes = dst_ptr - granule_beg;
638*c76045d9STacet 
639*c76045d9STacet     if (dst_ptr >= dst_end_down && !AddressIsPoisoned(dst_end))
640*c76045d9STacet       continue;
641*c76045d9STacet 
642*c76045d9STacet     if (granule_beg == dst_ptr || unpoisoned_bytes != 0)
643*c76045d9STacet       SetContainerGranule(granule_beg, unpoisoned_bytes);
644*c76045d9STacet     else if (!AddressIsPoisoned(dst_beg))
645*c76045d9STacet       SetContainerGranule(granule_beg, dst_beg - granule_beg);
646*c76045d9STacet   }
647*c76045d9STacet }
648*c76045d9STacet 
649*c76045d9STacet // A helper function for __sanitizer_copy_contiguous_container_annotations,
650*c76045d9STacet // has assumption about begin and end of the container.
651*c76045d9STacet // Should not be used stand alone.
652*c76045d9STacet static void CopyContainerFirstGranuleAnnotation(uptr src_beg, uptr dst_beg) {
653*c76045d9STacet   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
654*c76045d9STacet   // First granule
655*c76045d9STacet   uptr src_beg_down = RoundDownTo(src_beg, granularity);
656*c76045d9STacet   uptr dst_beg_down = RoundDownTo(dst_beg, granularity);
657*c76045d9STacet   if (dst_beg_down == dst_beg)
658*c76045d9STacet     return;
659*c76045d9STacet   if (!AddressIsPoisoned(src_beg))
660*c76045d9STacet     *(u8 *)MemToShadow(dst_beg_down) = *(u8 *)MemToShadow(src_beg_down);
661*c76045d9STacet   else if (!AddressIsPoisoned(dst_beg))
662*c76045d9STacet     SetContainerGranule(dst_beg_down, dst_beg - dst_beg_down);
663*c76045d9STacet }
664*c76045d9STacet 
665*c76045d9STacet // A helper function for __sanitizer_copy_contiguous_container_annotations,
666*c76045d9STacet // has assumption about begin and end of the container.
667*c76045d9STacet // Should not be used stand alone.
668*c76045d9STacet static void CopyContainerLastGranuleAnnotation(uptr src_end, uptr dst_end) {
669*c76045d9STacet   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
670*c76045d9STacet   // Last granule
671*c76045d9STacet   uptr src_end_down = RoundDownTo(src_end, granularity);
672*c76045d9STacet   uptr dst_end_down = RoundDownTo(dst_end, granularity);
673*c76045d9STacet   if (dst_end_down == dst_end || !AddressIsPoisoned(dst_end))
674*c76045d9STacet     return;
675*c76045d9STacet   if (AddressIsPoisoned(src_end))
676*c76045d9STacet     *(u8 *)MemToShadow(dst_end_down) = *(u8 *)MemToShadow(src_end_down);
677*c76045d9STacet   else
678*c76045d9STacet     SetContainerGranule(dst_end_down, src_end - src_end_down);
679*c76045d9STacet }
680*c76045d9STacet 
681*c76045d9STacet // This function copies ASan memory annotations (poisoned/unpoisoned states)
682*c76045d9STacet // from one buffer to another.
683*c76045d9STacet // It's main purpose is to help with relocating trivially relocatable objects,
684*c76045d9STacet // which memory may be poisoned, without calling copy constructor.
685*c76045d9STacet // However, it does not move memory content itself, only annotations.
686*c76045d9STacet // If the buffers aren't aligned (the distance between buffers isn't
687*c76045d9STacet // granule-aligned)
688*c76045d9STacet //     // src_beg % granularity != dst_beg % granularity
689*c76045d9STacet // the function handles this by going byte by byte, slowing down performance.
690*c76045d9STacet // The old buffer annotations are not removed. If necessary,
691*c76045d9STacet // user can unpoison old buffer with __asan_unpoison_memory_region.
692*c76045d9STacet void __sanitizer_copy_contiguous_container_annotations(const void *src_beg_p,
693*c76045d9STacet                                                        const void *src_end_p,
694*c76045d9STacet                                                        const void *dst_beg_p,
695*c76045d9STacet                                                        const void *dst_end_p) {
696*c76045d9STacet   if (!flags()->detect_container_overflow)
697*c76045d9STacet     return;
698*c76045d9STacet 
699*c76045d9STacet   VPrintf(3, "contiguous_container_src: %p %p\n", src_beg_p, src_end_p);
700*c76045d9STacet   VPrintf(3, "contiguous_container_dst: %p %p\n", dst_beg_p, dst_end_p);
701*c76045d9STacet 
702*c76045d9STacet   uptr src_beg = reinterpret_cast<uptr>(src_beg_p);
703*c76045d9STacet   uptr src_end = reinterpret_cast<uptr>(src_end_p);
704*c76045d9STacet   uptr dst_beg = reinterpret_cast<uptr>(dst_beg_p);
705*c76045d9STacet   uptr dst_end = reinterpret_cast<uptr>(dst_end_p);
706*c76045d9STacet 
707*c76045d9STacet   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
708*c76045d9STacet 
709*c76045d9STacet   if (src_beg > src_end || (dst_end - dst_beg) != (src_end - src_beg)) {
710*c76045d9STacet     GET_STACK_TRACE_FATAL_HERE;
711*c76045d9STacet     ReportBadParamsToCopyContiguousContainerAnnotations(
712*c76045d9STacet         src_beg, src_end, dst_beg, dst_end, &stack);
713*c76045d9STacet   }
714*c76045d9STacet 
715*c76045d9STacet   if (src_beg == src_end || src_beg == dst_beg)
716*c76045d9STacet     return;
717*c76045d9STacet   // Due to support for overlapping buffers, we may have to copy elements
718*c76045d9STacet   // in reversed order, when destination buffer starts in the middle of
719*c76045d9STacet   // the source buffer (or shares first granule with it).
720*c76045d9STacet   //
721*c76045d9STacet   // When buffers are not granule-aligned (or distance between them,
722*c76045d9STacet   // to be specific), annotatios have to be copied byte by byte.
723*c76045d9STacet   //
724*c76045d9STacet   // The only remaining edge cases involve edge granules,
725*c76045d9STacet   // when the container starts or ends within a granule.
726*c76045d9STacet   uptr src_beg_up = RoundUpTo(src_beg, granularity);
727*c76045d9STacet   uptr src_end_up = RoundUpTo(src_end, granularity);
728*c76045d9STacet   bool copy_in_reversed_order = src_beg < dst_beg && dst_beg <= src_end_up;
729*c76045d9STacet   if (src_beg % granularity != dst_beg % granularity ||
730*c76045d9STacet       RoundDownTo(dst_end - 1, granularity) <= dst_beg) {
731*c76045d9STacet     if (copy_in_reversed_order)
732*c76045d9STacet       SlowReversedCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
733*c76045d9STacet     else
734*c76045d9STacet       SlowCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
735*c76045d9STacet     return;
736*c76045d9STacet   }
737*c76045d9STacet 
738*c76045d9STacet   // As buffers are granule-aligned, we can just copy annotations of granules
739*c76045d9STacet   // from the middle.
740*c76045d9STacet   uptr dst_beg_up = RoundUpTo(dst_beg, granularity);
741*c76045d9STacet   uptr dst_end_down = RoundDownTo(dst_end, granularity);
742*c76045d9STacet   if (copy_in_reversed_order)
743*c76045d9STacet     CopyContainerLastGranuleAnnotation(src_end, dst_end);
744*c76045d9STacet   else
745*c76045d9STacet     CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);
746*c76045d9STacet 
747*c76045d9STacet   if (dst_beg_up < dst_end_down) {
748*c76045d9STacet     internal_memmove((u8 *)MemToShadow(dst_beg_up),
749*c76045d9STacet                      (u8 *)MemToShadow(src_beg_up),
750*c76045d9STacet                      (dst_end_down - dst_beg_up) / granularity);
751*c76045d9STacet   }
752*c76045d9STacet 
753*c76045d9STacet   if (copy_in_reversed_order)
754*c76045d9STacet     CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);
755*c76045d9STacet   else
756*c76045d9STacet     CopyContainerLastGranuleAnnotation(src_end, dst_end);
757*c76045d9STacet }
758*c76045d9STacet 
7594883c91aSVitaly Buka static const void *FindBadAddress(uptr begin, uptr end, bool poisoned) {
7604883c91aSVitaly Buka   CHECK_LE(begin, end);
7614883c91aSVitaly Buka   constexpr uptr kMaxRangeToCheck = 32;
7624883c91aSVitaly Buka   if (end - begin > kMaxRangeToCheck * 2) {
7634883c91aSVitaly Buka     if (auto *bad = FindBadAddress(begin, begin + kMaxRangeToCheck, poisoned))
7644883c91aSVitaly Buka       return bad;
7654883c91aSVitaly Buka     if (auto *bad = FindBadAddress(end - kMaxRangeToCheck, end, poisoned))
7664883c91aSVitaly Buka       return bad;
7674883c91aSVitaly Buka   }
7684883c91aSVitaly Buka 
7694883c91aSVitaly Buka   for (uptr i = begin; i < end; ++i)
7704883c91aSVitaly Buka     if (AddressIsPoisoned(i) != poisoned)
7714883c91aSVitaly Buka       return reinterpret_cast<const void *>(i);
7724883c91aSVitaly Buka   return nullptr;
7734883c91aSVitaly Buka }
7744883c91aSVitaly Buka 
775217222abSNico Weber const void *__sanitizer_contiguous_container_find_bad_address(
776217222abSNico Weber     const void *beg_p, const void *mid_p, const void *end_p) {
777217222abSNico Weber   if (!flags()->detect_container_overflow)
778217222abSNico Weber     return nullptr;
779dd1b7b79SAdvenam Tacet   uptr granularity = ASAN_SHADOW_GRANULARITY;
780217222abSNico Weber   uptr beg = reinterpret_cast<uptr>(beg_p);
781217222abSNico Weber   uptr end = reinterpret_cast<uptr>(end_p);
7824883c91aSVitaly Buka   uptr mid = reinterpret_cast<uptr>(mid_p);
7834883c91aSVitaly Buka   CHECK_LE(beg, mid);
7844883c91aSVitaly Buka   CHECK_LE(mid, end);
7854883c91aSVitaly Buka   // If the byte after the storage is unpoisoned, everything in the granule
7864883c91aSVitaly Buka   // before must stay unpoisoned.
787dd1b7b79SAdvenam Tacet   uptr annotations_end =
788dd1b7b79SAdvenam Tacet       (!AddrIsAlignedByGranularity(end) && !AddressIsPoisoned(end))
789dd1b7b79SAdvenam Tacet           ? RoundDownTo(end, granularity)
790dd1b7b79SAdvenam Tacet           : end;
7914883c91aSVitaly Buka   beg = Min(beg, annotations_end);
7924883c91aSVitaly Buka   mid = Min(mid, annotations_end);
7934883c91aSVitaly Buka   if (auto *bad = FindBadAddress(beg, mid, false))
7944883c91aSVitaly Buka     return bad;
7954883c91aSVitaly Buka   if (auto *bad = FindBadAddress(mid, annotations_end, true))
7964883c91aSVitaly Buka     return bad;
7974883c91aSVitaly Buka   return FindBadAddress(annotations_end, end, false);
798217222abSNico Weber }
799217222abSNico Weber 
800217222abSNico Weber int __sanitizer_verify_contiguous_container(const void *beg_p,
801217222abSNico Weber                                             const void *mid_p,
802217222abSNico Weber                                             const void *end_p) {
803217222abSNico Weber   return __sanitizer_contiguous_container_find_bad_address(beg_p, mid_p,
804217222abSNico Weber                                                            end_p) == nullptr;
805217222abSNico Weber }
806217222abSNico Weber 
8071c5ad6d2SAdvenam Tacet const void *__sanitizer_double_ended_contiguous_container_find_bad_address(
8081c5ad6d2SAdvenam Tacet     const void *storage_beg_p, const void *container_beg_p,
8091c5ad6d2SAdvenam Tacet     const void *container_end_p, const void *storage_end_p) {
810cef078a8SVitaly Buka   if (!flags()->detect_container_overflow)
8111c5ad6d2SAdvenam Tacet     return nullptr;
812cef078a8SVitaly Buka   uptr granularity = ASAN_SHADOW_GRANULARITY;
813cef078a8SVitaly Buka   uptr storage_beg = reinterpret_cast<uptr>(storage_beg_p);
814cef078a8SVitaly Buka   uptr storage_end = reinterpret_cast<uptr>(storage_end_p);
815cef078a8SVitaly Buka   uptr beg = reinterpret_cast<uptr>(container_beg_p);
816cef078a8SVitaly Buka   uptr end = reinterpret_cast<uptr>(container_end_p);
817cef078a8SVitaly Buka 
818cef078a8SVitaly Buka   // The prefix of the firs granule of the container is unpoisoned.
819cef078a8SVitaly Buka   if (beg != end)
820cef078a8SVitaly Buka     beg = Max(storage_beg, RoundDownTo(beg, granularity));
821cef078a8SVitaly Buka 
822cef078a8SVitaly Buka   // If the byte after the storage is unpoisoned, the prefix of the last granule
823cef078a8SVitaly Buka   // is unpoisoned.
824cef078a8SVitaly Buka   uptr annotations_end = (!AddrIsAlignedByGranularity(storage_end) &&
825cef078a8SVitaly Buka                           !AddressIsPoisoned(storage_end))
826cef078a8SVitaly Buka                              ? RoundDownTo(storage_end, granularity)
827cef078a8SVitaly Buka                              : storage_end;
828cef078a8SVitaly Buka   storage_beg = Min(storage_beg, annotations_end);
829cef078a8SVitaly Buka   beg = Min(beg, annotations_end);
830cef078a8SVitaly Buka   end = Min(end, annotations_end);
831cef078a8SVitaly Buka 
832cef078a8SVitaly Buka   if (auto *bad = FindBadAddress(storage_beg, beg, true))
833cef078a8SVitaly Buka     return bad;
834cef078a8SVitaly Buka   if (auto *bad = FindBadAddress(beg, end, false))
835cef078a8SVitaly Buka     return bad;
836cef078a8SVitaly Buka   if (auto *bad = FindBadAddress(end, annotations_end, true))
837cef078a8SVitaly Buka     return bad;
838cef078a8SVitaly Buka   return FindBadAddress(annotations_end, storage_end, false);
8391c5ad6d2SAdvenam Tacet }
8401c5ad6d2SAdvenam Tacet 
8411c5ad6d2SAdvenam Tacet int __sanitizer_verify_double_ended_contiguous_container(
8421c5ad6d2SAdvenam Tacet     const void *storage_beg_p, const void *container_beg_p,
8431c5ad6d2SAdvenam Tacet     const void *container_end_p, const void *storage_end_p) {
8441c5ad6d2SAdvenam Tacet   return __sanitizer_double_ended_contiguous_container_find_bad_address(
8451c5ad6d2SAdvenam Tacet              storage_beg_p, container_beg_p, container_end_p, storage_end_p) ==
8461c5ad6d2SAdvenam Tacet          nullptr;
8471c5ad6d2SAdvenam Tacet }
8481c5ad6d2SAdvenam Tacet 
849217222abSNico Weber extern "C" SANITIZER_INTERFACE_ATTRIBUTE
850217222abSNico Weber void __asan_poison_intra_object_redzone(uptr ptr, uptr size) {
851217222abSNico Weber   AsanPoisonOrUnpoisonIntraObjectRedzone(ptr, size, true);
852217222abSNico Weber }
853217222abSNico Weber 
854217222abSNico Weber extern "C" SANITIZER_INTERFACE_ATTRIBUTE
855217222abSNico Weber void __asan_unpoison_intra_object_redzone(uptr ptr, uptr size) {
856217222abSNico Weber   AsanPoisonOrUnpoisonIntraObjectRedzone(ptr, size, false);
857217222abSNico Weber }
858217222abSNico Weber 
859217222abSNico Weber // --- Implementation of LSan-specific functions --- {{{1
860217222abSNico Weber namespace __lsan {
861217222abSNico Weber bool WordIsPoisoned(uptr addr) {
862217222abSNico Weber   return (__asan_region_is_poisoned(addr, sizeof(uptr)) != 0);
863217222abSNico Weber }
864217222abSNico Weber }
865