//===-- tsan_sync_test.cpp ------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#include "tsan_sync.h"
#include "tsan_rtl.h"
#include "gtest/gtest.h"

namespace __tsan {

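// Basic block lifecycle: an allocated block can be looked up by address,
// reports its size and owning thread, and is gone after FreeBlock.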
TEST(MetaMap, Basic) {
  ThreadState *thr = cur_thread();
  SlotLocker locker(thr);
  MetaMap *m = &ctx->metamap;
  u64 block[1] = {};  // fake malloc block
  m->AllocBlock(thr, 0, (uptr)&block[0], 1 * sizeof(u64));
  MBlock *mb = m->GetBlock((uptr)&block[0]);
  CHECK_NE(mb, (MBlock *)0);
  CHECK_EQ(mb->siz, 1 * sizeof(u64));
  CHECK_EQ(mb->tid, thr->tid);
  uptr sz = m->FreeBlock(thr->proc(), (uptr)&block[0], true);
  CHECK_EQ(sz, 1 * sizeof(u64));
  mb = m->GetBlock((uptr)&block[0]);
  CHECK_EQ(mb, (MBlock *)0);
}

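// FreeRange over a region containing two adjacent blocks must remove both.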
TEST(MetaMap, FreeRange) {
  ThreadState *thr = cur_thread();
  SlotLocker locker(thr);
  MetaMap *m = &ctx->metamap;
  u64 block[4] = {};  // fake malloc block
  m->AllocBlock(thr, 0, (uptr)&block[0], 1 * sizeof(u64));
  m->AllocBlock(thr, 0, (uptr)&block[1], 3 * sizeof(u64));
  MBlock *mb1 = m->GetBlock((uptr)&block[0]);
  CHECK_EQ(mb1->siz, 1 * sizeof(u64));
  MBlock *mb2 = m->GetBlock((uptr)&block[1]);
  CHECK_EQ(mb2->siz, 3 * sizeof(u64));
  m->FreeRange(thr->proc(), (uptr)&block[0], 4 * sizeof(u64), true);
  mb1 = m->GetBlock((uptr)&block[0]);
  CHECK_EQ(mb1, (MBlock *)0);
  mb2 = m->GetBlock((uptr)&block[1]);
  CHECK_EQ(mb2, (MBlock *)0);
}

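// Sync objects created inside a block are no longer found once the
// enclosing block is freed.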
TEST(MetaMap, Sync) {
  // CHECK can call memset/etc. Disable interceptors to prevent
  // them from detecting that we exit runtime with mutexes held.
  ScopedIgnoreInterceptors ignore;
  ThreadState *thr = cur_thread();
  SlotLocker locker(thr);
  MetaMap *m = &ctx->metamap;
  u64 block[4] = {};  // fake malloc block
  m->AllocBlock(thr, 0, (uptr)&block[0], 4 * sizeof(u64));
  SyncVar *s1 = m->GetSyncIfExists((uptr)&block[0]);
  CHECK_EQ(s1, (SyncVar *)0);
  s1 = m->GetSyncOrCreate(thr, 0, (uptr)&block[0], false);
  CHECK_NE(s1, (SyncVar *)0);
  CHECK_EQ(s1->addr, (uptr)&block[0]);
  SyncVar *s2 = m->GetSyncOrCreate(thr, 0, (uptr)&block[1], false);
  CHECK_NE(s2, (SyncVar *)0);
  CHECK_EQ(s2->addr, (uptr)&block[1]);
  m->FreeBlock(thr->proc(), (uptr)&block[0], true);
  s1 = m->GetSyncIfExists((uptr)&block[0]);
  CHECK_EQ(s1, (SyncVar *)0);
  s2 = m->GetSyncIfExists((uptr)&block[1]);
  CHECK_EQ(s2, (SyncVar *)0);
  m->OnProcIdle(thr->proc());
}

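// MoveMemory must transfer both blocks and sync objects to the destination
// range and leave nothing behind at the source range.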
TEST(MetaMap, MoveMemory) {
  ScopedIgnoreInterceptors ignore;
  ThreadState *thr = cur_thread();
  SlotLocker locker(thr);
  MetaMap *m = &ctx->metamap;
  u64 block1[4] = {};  // fake malloc block
  u64 block2[4] = {};  // fake malloc block
  m->AllocBlock(thr, 0, (uptr)&block1[0], 3 * sizeof(u64));
  m->AllocBlock(thr, 0, (uptr)&block1[3], 1 * sizeof(u64));
  SyncVar *s1 = m->GetSyncOrCreate(thr, 0, (uptr)&block1[0], false);
  SyncVar *s2 = m->GetSyncOrCreate(thr, 0, (uptr)&block1[1], false);
  m->MoveMemory((uptr)&block1[0], (uptr)&block2[0], 4 * sizeof(u64));
  MBlock *mb1 = m->GetBlock((uptr)&block1[0]);
  CHECK_EQ(mb1, (MBlock *)0);
  MBlock *mb2 = m->GetBlock((uptr)&block1[3]);
  CHECK_EQ(mb2, (MBlock *)0);
  mb1 = m->GetBlock((uptr)&block2[0]);
  CHECK_NE(mb1, (MBlock *)0);
  CHECK_EQ(mb1->siz, 3 * sizeof(u64));
  mb2 = m->GetBlock((uptr)&block2[3]);
  CHECK_NE(mb2, (MBlock *)0);
  CHECK_EQ(mb2->siz, 1 * sizeof(u64));
  s1 = m->GetSyncIfExists((uptr)&block1[0]);
  CHECK_EQ(s1, (SyncVar *)0);
  s2 = m->GetSyncIfExists((uptr)&block1[1]);
  CHECK_EQ(s2, (SyncVar *)0);
  s1 = m->GetSyncIfExists((uptr)&block2[0]);
  CHECK_NE(s1, (SyncVar *)0);
  CHECK_EQ(s1->addr, (uptr)&block2[0]);
  s2 = m->GetSyncIfExists((uptr)&block2[1]);
  CHECK_NE(s2, (SyncVar *)0);
  CHECK_EQ(s2->addr, (uptr)&block2[1]);
  m->FreeRange(thr->proc(), (uptr)&block2[0], 4 * sizeof(u64), true);
}

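// Resetting a sync object and then freeing its enclosing block must still
// report the block's full size.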
TEST(MetaMap, ResetSync) {
  ScopedIgnoreInterceptors ignore;
  ThreadState *thr = cur_thread();
  SlotLocker locker(thr);
  MetaMap *m = &ctx->metamap;
  u64 block[1] = {};  // fake malloc block
  m->AllocBlock(thr, 0, (uptr)&block[0], 1 * sizeof(u64));
  SyncVar *s = m->GetSyncOrCreate(thr, 0, (uptr)&block[0], false);
  s->Reset();
  uptr sz = m->FreeBlock(thr->proc(), (uptr)&block[0], true);
  CHECK_EQ(sz, 1 * sizeof(u64));
}

}  // namespace __tsan