xref: /llvm-project/compiler-rt/lib/tsan/tests/unit/tsan_shadow_test.cpp (revision a7cf694c4950b7ce5309c3c4d613f40966c8f7ae)
15de29a4bSNico Weber //===-- tsan_shadow_test.cpp ----------------------------------------------===//
25de29a4bSNico Weber //
35de29a4bSNico Weber // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
45de29a4bSNico Weber // See https://llvm.org/LICENSE.txt for license information.
55de29a4bSNico Weber // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
65de29a4bSNico Weber //
75de29a4bSNico Weber //===----------------------------------------------------------------------===//
85de29a4bSNico Weber //
95de29a4bSNico Weber // This file is a part of ThreadSanitizer (TSan), a race detector.
105de29a4bSNico Weber //
115de29a4bSNico Weber //===----------------------------------------------------------------------===//
125de29a4bSNico Weber #include "tsan_platform.h"
135de29a4bSNico Weber #include "tsan_rtl.h"
145de29a4bSNico Weber #include "gtest/gtest.h"
155de29a4bSNico Weber 
165de29a4bSNico Weber namespace __tsan {
175de29a4bSNico Weber 
// An address range recorded for one app region's meta mapping.
// Filled by AddMetaRegion: `start` is MemToMeta(app begin) and `end` is
// MemToMeta(app end - 1), i.e. `end` is inclusive.
struct Region {
  uptr start;
  uptr end;
};
22682d635aSThurston Dang 
CheckShadow(const Shadow * s,Sid sid,Epoch epoch,uptr addr,uptr size,AccessType typ)23b3321349SDmitry Vyukov void CheckShadow(const Shadow *s, Sid sid, Epoch epoch, uptr addr, uptr size,
24b3321349SDmitry Vyukov                  AccessType typ) {
25b3321349SDmitry Vyukov   uptr addr1 = 0;
26b3321349SDmitry Vyukov   uptr size1 = 0;
27b3321349SDmitry Vyukov   AccessType typ1 = 0;
28b3321349SDmitry Vyukov   s->GetAccess(&addr1, &size1, &typ1);
29b3321349SDmitry Vyukov   CHECK_EQ(s->sid(), sid);
30b3321349SDmitry Vyukov   CHECK_EQ(s->epoch(), epoch);
31b3321349SDmitry Vyukov   CHECK_EQ(addr1, addr);
32b3321349SDmitry Vyukov   CHECK_EQ(size1, size);
33b3321349SDmitry Vyukov   CHECK_EQ(typ1, typ);
3479fbba9bSDmitry Vyukov }
35b3321349SDmitry Vyukov 
// Round-trips sid/epoch/ignore-bit through FastState, encodes several
// accesses into Shadow values and checks they decode back via CheckShadow,
// then exercises the IsBothReadsOrAtomic/IsRWWeakerOrEqual predicates and
// the special kRodata shadow value.
TEST(Shadow, Shadow) {
  Sid sid = static_cast<Sid>(11);
  Epoch epoch = static_cast<Epoch>(22);
  FastState fs;
  fs.SetSid(sid);
  fs.SetEpoch(epoch);
  CHECK_EQ(fs.sid(), sid);
  CHECK_EQ(fs.epoch(), epoch);
  CHECK_EQ(fs.GetIgnoreBit(), false);
  fs.SetIgnoreBit();
  CHECK_EQ(fs.GetIgnoreBit(), true);
  fs.ClearIgnoreBit();
  CHECK_EQ(fs.GetIgnoreBit(), false);

  // Shadow stores only the offset within the 8-byte cell: an access at
  // 0xfffff8 + 4 decodes to addr 4 (see s2/s3 below).
  Shadow s0(fs, 1, 2, kAccessWrite);
  CheckShadow(&s0, sid, epoch, 1, 2, kAccessWrite);
  Shadow s1(fs, 2, 3, kAccessRead);
  CheckShadow(&s1, sid, epoch, 2, 3, kAccessRead);
  Shadow s2(fs, 0xfffff8 + 4, 1, kAccessWrite | kAccessAtomic);
  CheckShadow(&s2, sid, epoch, 4, 1, kAccessWrite | kAccessAtomic);
  Shadow s3(fs, 0xfffff8 + 0, 8, kAccessRead | kAccessAtomic);
  CheckShadow(&s3, sid, epoch, 0, 8, kAccessRead | kAccessAtomic);

  // IsBothReadsOrAtomic: true iff both the stored access and the queried
  // access are reads, or both are atomic.
  CHECK(!s0.IsBothReadsOrAtomic(kAccessRead | kAccessAtomic));
  CHECK(!s1.IsBothReadsOrAtomic(kAccessAtomic));
  CHECK(!s1.IsBothReadsOrAtomic(kAccessWrite));
  CHECK(s1.IsBothReadsOrAtomic(kAccessRead));
  CHECK(s2.IsBothReadsOrAtomic(kAccessAtomic));
  CHECK(!s2.IsBothReadsOrAtomic(kAccessWrite));
  CHECK(!s2.IsBothReadsOrAtomic(kAccessRead));
  CHECK(s3.IsBothReadsOrAtomic(kAccessAtomic));
  CHECK(!s3.IsBothReadsOrAtomic(kAccessWrite));
  CHECK(s3.IsBothReadsOrAtomic(kAccessRead));

  // IsRWWeakerOrEqual: the stored access is not "stronger" than the queried
  // one (atomic weaker than non-atomic, read weaker than write).
  CHECK(!s0.IsRWWeakerOrEqual(kAccessRead | kAccessAtomic));
  CHECK(s1.IsRWWeakerOrEqual(kAccessWrite));
  CHECK(s1.IsRWWeakerOrEqual(kAccessRead));
  CHECK(!s1.IsRWWeakerOrEqual(kAccessWrite | kAccessAtomic));

  CHECK(!s2.IsRWWeakerOrEqual(kAccessRead | kAccessAtomic));
  CHECK(s2.IsRWWeakerOrEqual(kAccessWrite | kAccessAtomic));
  CHECK(s2.IsRWWeakerOrEqual(kAccessRead));
  CHECK(s2.IsRWWeakerOrEqual(kAccessWrite));

  CHECK(s3.IsRWWeakerOrEqual(kAccessRead | kAccessAtomic));
  CHECK(s3.IsRWWeakerOrEqual(kAccessWrite | kAccessAtomic));
  CHECK(s3.IsRWWeakerOrEqual(kAccessRead));
  CHECK(s3.IsRWWeakerOrEqual(kAccessWrite));

  // The rodata marker decodes as a sid-0/epoch-0 read of addr 0, size 0.
  Shadow sro(Shadow::kRodata);
  CheckShadow(&sro, static_cast<Sid>(0), kEpochZero, 0, 0, kAccessRead);
}
8879fbba9bSDmitry Vyukov 
// Checks that representative addresses from the global, stack and heap app
// regions are recognized as application memory and that their shadow
// addresses fall inside the shadow range.
TEST(Shadow, Mapping) {
  static int global;
  int stack;
  void *heap = malloc(0);
  // Freed immediately: only the address value is inspected below, the
  // pointer is never dereferenced.
  free(heap);

  CHECK(IsAppMem((uptr)&global));
  CHECK(IsAppMem((uptr)&stack));
  CHECK(IsAppMem((uptr)heap));

  CHECK(IsShadowMem(MemToShadow((uptr)&global)));
  CHECK(IsShadowMem(MemToShadow((uptr)&stack)));
  CHECK(IsShadowMem(MemToShadow((uptr)heap)));
}
1035de29a4bSNico Weber 
// "Celling" [sic, presumably "ceiling"]: verifies shadow-cell granularity —
// all addresses inside one kShadowCell-byte cell map to the same shadow
// address, and consecutive cells map to shadow addresses kShadowCnt apart.
TEST(Shadow, Celling) {
  u64 aligned_data[4];
  char *data = (char*)aligned_data;
  CHECK(IsAligned(reinterpret_cast<uptr>(data), kShadowSize));
  RawShadow *s0 = MemToShadow((uptr)&data[0]);
  CHECK(IsAligned(reinterpret_cast<uptr>(s0), kShadowSize));
  // First cell: every byte shares s0.
  for (unsigned i = 1; i < kShadowCell; i++)
    CHECK_EQ(s0, MemToShadow((uptr)&data[i]));
  // Second and third cells: shadow advances by kShadowCnt per cell.
  for (unsigned i = kShadowCell; i < 2*kShadowCell; i++)
    CHECK_EQ(s0 + kShadowCnt, MemToShadow((uptr)&data[i]));
  for (unsigned i = 2*kShadowCell; i < 3*kShadowCell; i++)
    CHECK_EQ(s0 + 2 * kShadowCnt, MemToShadow((uptr)&data[i]));
}
1175de29a4bSNico Weber 
// Detects whether the Mapping has a kBroken field (SFINAE).
// The second overload of broken() is viable only when
// Has<Mapping::kBroken>::Result is well-formed, i.e. when the Mapping
// declares a kBroken constant; otherwise overload resolution falls back to
// the variadic version, which reports "not broken".
template <uptr>
struct Has {
  typedef bool Result;
};

// Fallback: Mapping has no kBroken member, so nothing is marked broken.
template <typename Mapping>
bool broken(...) {
  return false;
}

// Preferred overload: tests the requested kBroken* bit(s) in
// Mapping::kBroken.
template <typename Mapping>
bool broken(uptr what, typename Has<Mapping::kBroken>::Result = false) {
  return Mapping::kBroken & what;
}
1330ebfe7c3SDmitry Vyukov 
CompareRegion(const void * region_a,const void * region_b)134682d635aSThurston Dang static int CompareRegion(const void *region_a, const void *region_b) {
135*a7cf694cSThurston Dang   uptr start_a = ((const struct Region *)region_a)->start;
136*a7cf694cSThurston Dang   uptr start_b = ((const struct Region *)region_b)->start;
137682d635aSThurston Dang 
138682d635aSThurston Dang   if (start_a < start_b) {
139682d635aSThurston Dang     return -1;
140682d635aSThurston Dang   } else if (start_a > start_b) {
141682d635aSThurston Dang     return 1;
142682d635aSThurston Dang   } else {
143682d635aSThurston Dang     return 0;
144682d635aSThurston Dang   }
145682d635aSThurston Dang }
146682d635aSThurston Dang 
147682d635aSThurston Dang template <typename Mapping>
AddMetaRegion(struct Region * shadows,int * num_regions,uptr start,uptr end)148682d635aSThurston Dang static void AddMetaRegion(struct Region *shadows, int *num_regions, uptr start,
149682d635aSThurston Dang                           uptr end) {
150682d635aSThurston Dang   // If the app region is not empty, add its meta to the array.
151682d635aSThurston Dang   if (start != end) {
152682d635aSThurston Dang     shadows[*num_regions].start = (uptr)MemToMetaImpl::Apply<Mapping>(start);
153682d635aSThurston Dang     shadows[*num_regions].end = (uptr)MemToMetaImpl::Apply<Mapping>(end - 1);
154682d635aSThurston Dang     *num_regions = (*num_regions) + 1;
155682d635aSThurston Dang   }
156682d635aSThurston Dang }
157682d635aSThurston Dang 
1580ebfe7c3SDmitry Vyukov struct MappingTest {
1590ebfe7c3SDmitry Vyukov   template <typename Mapping>
Apply__tsan::MappingTest1600ebfe7c3SDmitry Vyukov   static void Apply() {
1610ebfe7c3SDmitry Vyukov     // Easy (but ugly) way to print the mapping name.
1620ebfe7c3SDmitry Vyukov     Printf("%s\n", __PRETTY_FUNCTION__);
1630ebfe7c3SDmitry Vyukov     TestRegion<Mapping>(Mapping::kLoAppMemBeg, Mapping::kLoAppMemEnd);
1640ebfe7c3SDmitry Vyukov     TestRegion<Mapping>(Mapping::kMidAppMemBeg, Mapping::kMidAppMemEnd);
1650ebfe7c3SDmitry Vyukov     TestRegion<Mapping>(Mapping::kHiAppMemBeg, Mapping::kHiAppMemEnd);
1660ebfe7c3SDmitry Vyukov     TestRegion<Mapping>(Mapping::kHeapMemBeg, Mapping::kHeapMemEnd);
167682d635aSThurston Dang 
168682d635aSThurston Dang     TestDisjointMetas<Mapping>();
169682d635aSThurston Dang 
170682d635aSThurston Dang     // Not tested: the ordering of regions (low app vs. shadow vs. mid app
171682d635aSThurston Dang     // etc.). That is enforced at runtime by CheckAndProtect.
1720ebfe7c3SDmitry Vyukov   }
1730ebfe7c3SDmitry Vyukov 
1740ebfe7c3SDmitry Vyukov   template <typename Mapping>
TestRegion__tsan::MappingTest1750ebfe7c3SDmitry Vyukov   static void TestRegion(uptr beg, uptr end) {
1760ebfe7c3SDmitry Vyukov     if (beg == end)
1770ebfe7c3SDmitry Vyukov       return;
178c90bf3ffSDmitry Vyukov     Printf("checking region [0x%zx-0x%zx)\n", beg, end);
1790ebfe7c3SDmitry Vyukov     uptr prev = 0;
1800ebfe7c3SDmitry Vyukov     for (uptr p0 = beg; p0 <= end; p0 += (end - beg) / 256) {
1810ebfe7c3SDmitry Vyukov       for (int x = -(int)kShadowCell; x <= (int)kShadowCell; x += kShadowCell) {
1820ebfe7c3SDmitry Vyukov         const uptr p = RoundDown(p0 + x, kShadowCell);
1830ebfe7c3SDmitry Vyukov         if (p < beg || p >= end)
1840ebfe7c3SDmitry Vyukov           continue;
1850ebfe7c3SDmitry Vyukov         const uptr s = MemToShadowImpl::Apply<Mapping>(p);
1860ebfe7c3SDmitry Vyukov         u32 *const m = MemToMetaImpl::Apply<Mapping>(p);
1870ebfe7c3SDmitry Vyukov         const uptr r = ShadowToMemImpl::Apply<Mapping>(s);
188c90bf3ffSDmitry Vyukov         Printf("  addr=0x%zx: shadow=0x%zx meta=%p reverse=0x%zx\n", p, s, m,
189c90bf3ffSDmitry Vyukov                r);
1900ebfe7c3SDmitry Vyukov         CHECK(IsAppMemImpl::Apply<Mapping>(p));
1910ebfe7c3SDmitry Vyukov         if (!broken<Mapping>(kBrokenMapping))
1920ebfe7c3SDmitry Vyukov           CHECK(IsShadowMemImpl::Apply<Mapping>(s));
1930ebfe7c3SDmitry Vyukov         CHECK(IsMetaMemImpl::Apply<Mapping>(reinterpret_cast<uptr>(m)));
1948233c343SDmitry Vyukov         CHECK_EQ(p, RestoreAddrImpl::Apply<Mapping>(CompressAddr(p)));
1950ebfe7c3SDmitry Vyukov         if (!broken<Mapping>(kBrokenReverseMapping))
1960ebfe7c3SDmitry Vyukov           CHECK_EQ(p, r);
1970ebfe7c3SDmitry Vyukov         if (prev && !broken<Mapping>(kBrokenLinearity)) {
1980ebfe7c3SDmitry Vyukov           // Ensure that shadow and meta mappings are linear within a single
1990ebfe7c3SDmitry Vyukov           // user range. Lots of code that processes memory ranges assumes it.
2000ebfe7c3SDmitry Vyukov           const uptr prev_s = MemToShadowImpl::Apply<Mapping>(prev);
2010ebfe7c3SDmitry Vyukov           u32 *const prev_m = MemToMetaImpl::Apply<Mapping>(prev);
2020ebfe7c3SDmitry Vyukov           CHECK_EQ(s - prev_s, (p - prev) * kShadowMultiplier);
2030ebfe7c3SDmitry Vyukov           CHECK_EQ(m - prev_m, (p - prev) / kMetaShadowCell);
2040ebfe7c3SDmitry Vyukov         }
2050ebfe7c3SDmitry Vyukov         prev = p;
2060ebfe7c3SDmitry Vyukov       }
2070ebfe7c3SDmitry Vyukov     }
2080ebfe7c3SDmitry Vyukov   }
209682d635aSThurston Dang 
210682d635aSThurston Dang   template <typename Mapping>
TestDisjointMetas__tsan::MappingTest211682d635aSThurston Dang   static void TestDisjointMetas() {
212682d635aSThurston Dang     // Checks that the meta for each app region does not overlap with
213682d635aSThurston Dang     // the meta for other app regions. For example, the meta for a high
214682d635aSThurston Dang     // app pointer shouldn't be aliased to the meta of a mid app pointer.
215682d635aSThurston Dang     // Notice that this is important even though there does not exist a
216682d635aSThurston Dang     // MetaToMem function.
217682d635aSThurston Dang     // (If a MetaToMem function did exist, we could simply
218682d635aSThurston Dang     // check in the TestRegion function that it inverts MemToMeta.)
219682d635aSThurston Dang     //
220682d635aSThurston Dang     // We don't try to be clever by allowing the non-PIE (low app)
221682d635aSThurston Dang     // and PIE (mid and high app) meta regions to overlap.
222682d635aSThurston Dang     struct Region metas[4];
223682d635aSThurston Dang     int num_regions = 0;
224682d635aSThurston Dang     AddMetaRegion<Mapping>(metas, &num_regions, Mapping::kLoAppMemBeg,
225682d635aSThurston Dang                            Mapping::kLoAppMemEnd);
226682d635aSThurston Dang     AddMetaRegion<Mapping>(metas, &num_regions, Mapping::kMidAppMemBeg,
227682d635aSThurston Dang                            Mapping::kMidAppMemEnd);
228682d635aSThurston Dang     AddMetaRegion<Mapping>(metas, &num_regions, Mapping::kHiAppMemBeg,
229682d635aSThurston Dang                            Mapping::kHiAppMemEnd);
230682d635aSThurston Dang     AddMetaRegion<Mapping>(metas, &num_regions, Mapping::kHeapMemBeg,
231682d635aSThurston Dang                            Mapping::kHeapMemEnd);
232682d635aSThurston Dang 
233682d635aSThurston Dang     // It is not required that the low app shadow is below the mid app
234682d635aSThurston Dang     // shadow etc., hence we sort the shadows.
235682d635aSThurston Dang     qsort(metas, num_regions, sizeof(struct Region), CompareRegion);
236682d635aSThurston Dang 
237682d635aSThurston Dang     for (int i = 0; i < num_regions; i++)
238682d635aSThurston Dang       Printf("[0x%lu, 0x%lu]\n", metas[i].start, metas[i].end);
239682d635aSThurston Dang 
240682d635aSThurston Dang     if (!broken<Mapping>(kBrokenAliasedMetas))
241682d635aSThurston Dang       for (int i = 1; i < num_regions; i++)
242682d635aSThurston Dang         CHECK(metas[i - 1].end <= metas[i].start);
243682d635aSThurston Dang   }
2440ebfe7c3SDmitry Vyukov };
2450ebfe7c3SDmitry Vyukov 
TEST(Shadow,AllMappings)2460ebfe7c3SDmitry Vyukov TEST(Shadow, AllMappings) { ForEachMapping<MappingTest>(); }
2470ebfe7c3SDmitry Vyukov 
2485de29a4bSNico Weber }  // namespace __tsan
249