//===- FuzzerTracePC.h - Internal header for the Fuzzer --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// fuzzer::TracePC
//===----------------------------------------------------------------------===//

#ifndef LLVM_FUZZER_TRACE_PC
#define LLVM_FUZZER_TRACE_PC

#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerValueBitMap.h"

#include <set>
#include <unordered_map>

namespace fuzzer {

// TableOfRecentCompares (TORC) remembers the most recently performed
// comparisons of type T.
// We record the arguments of CMP instructions in this table unconditionally
// because it seems cheaper this way than to compute some expensive
// conditions inside __sanitizer_cov_trace_cmp*.
// After the unit has been executed we may decide to use the contents of
// this table to populate a Dictionary.
template<class T, size_t kSizeT>
struct TableOfRecentCompares {
  static const size_t kSize = kSizeT;
  struct Pair {
    T A, B;
  };
  ATTRIBUTE_NO_SANITIZE_ALL
  void Insert(size_t Idx, const T &Arg1, const T &Arg2) {
    Idx = Idx % kSize;
    Table[Idx].A = Arg1;
    Table[Idx].B = Arg2;
  }

  Pair Get(size_t I) { return Table[I % kSize]; }

  Pair Table[kSize];
};
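
// Illustrative sketch, not part of the API: the __sanitizer_cov_trace_cmp*
// hooks record both operands of a comparison with Insert(), and a later
// mutation/dictionary pass may read them back with Get(). This assumes a
// table declared as TableOfRecentCompares<uint32_t, 32> TORC4 (TracePC below
// owns such tables):
//
//   TORC4.Insert(Idx, Arg1, Arg2);   // from a cmp hook; Idx is wrapped mod kSize
//   for (size_t i = 0; i < TORC4.kSize; i++) {
//     auto P = TORC4.Get(i);         // P.A and P.B are the recorded operands
//     // ... consider P.A / P.B as dictionary candidates ...
//   }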

template <size_t kSizeT>
struct MemMemTable {
  static const size_t kSize = kSizeT;
  Word MemMemWords[kSize];
  Word EmptyWord;

  void Add(const uint8_t *Data, size_t Size) {
    if (Size <= 2) return;
    Size = std::min(Size, Word::GetMaxSize());
    size_t Idx = SimpleFastHash(Data, Size) % kSize;
    MemMemWords[Idx].Set(Data, Size);
  }
  const Word &Get(size_t Idx) {
    for (size_t i = 0; i < kSize; i++) {
      const Word &W = MemMemWords[(Idx + i) % kSize];
      if (W.size()) return W;
    }
    EmptyWord.Set(nullptr, 0);
    return EmptyWord;
  }
};
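
// Illustrative sketch, not part of the API: memcmp/strstr-style hooks can
// Add() the byte sequences they observe, and a mutator may later Get() a
// stored word to splice into an input. This assumes the MemMemTable<1024> MMT
// member declared in TracePC below; `RandomIndex` is a placeholder, not a
// real libFuzzer entity.
//
//   MMT.Add(Data, Size);                   // remember a "needle" seen by a hook
//   const Word &W = MMT.Get(RandomIndex);  // probe slots starting at RandomIndex
//   if (W.size()) { /* use W.data() / W.size() as a dictionary entry */ }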

class TracePC {
 public:
  static const size_t kNumPCs = 1 << 21;
  // How many bits of PC are used from __sanitizer_cov_trace_pc.
  static const size_t kTracePcBits = 18;

  enum HandleUnstableOptions {
    MinUnstable = 1,
    ZeroUnstable = 2,
  };

  void HandleInit(uint32_t *Start, uint32_t *Stop);
  void HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop);
  void HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop);
  void HandleCallerCallee(uintptr_t Caller, uintptr_t Callee);
  template <class T> void HandleCmp(uintptr_t PC, T Arg1, T Arg2);
  size_t GetTotalPCCoverage();
  void SetUseCounters(bool UC) { UseCounters = UC; }
  void SetUseValueProfileMask(uint32_t VPMask) { UseValueProfileMask = VPMask; }
  void SetPrintNewPCs(bool P) { DoPrintNewPCs = P; }
  void SetPrintNewFuncs(size_t P) { NumPrintNewFuncs = P; }
  void UpdateObservedPCs();
  template <class Callback> void CollectFeatures(Callback CB) const;

  void ResetMaps() {
    ValueProfileMap.Reset();
    if (NumModules)
      memset(Counters(), 0, GetNumPCs());
    ClearExtraCounters();
    ClearInlineCounters();
  }

  void ClearInlineCounters();

  void UpdateFeatureSet(size_t CurrentElementIdx, size_t CurrentElementSize);
  void PrintFeatureSet();

  void PrintModuleInfo();

  void PrintCoverage();
  void DumpCoverage();
  void PrintUnstableStats();

  template<class CallBack>
  void IterateCoveredFunctions(CallBack CB);

  void AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                         size_t n, bool StopAtZero);

  TableOfRecentCompares<uint32_t, 32> TORC4;
  TableOfRecentCompares<uint64_t, 32> TORC8;
  TableOfRecentCompares<Word, 32> TORCW;
  MemMemTable<1024> MMT;

  size_t GetNumPCs() const {
    return NumGuards == 0 ? (1 << kTracePcBits) : Min(kNumPCs, NumGuards + 1);
  }
  uintptr_t GetPC(size_t Idx) {
    assert(Idx < GetNumPCs());
    return PCs()[Idx];
  }

  void RecordInitialStack();
  uintptr_t GetMaxStackOffset() const;

  template<class CallBack>
  void ForEachObservedPC(CallBack CB) {
    for (auto PC : ObservedPCs)
      CB(PC);
  }

  void SetFocusFunction(const std::string &FuncName);
  bool ObservedFocusFunction();

  void InitializeUnstableCounters();
  bool UpdateUnstableCounters(int UnstableMode);
  void UpdateAndApplyUnstableCounters(int UnstableMode);

 private:
  struct UnstableEdge {
    uint8_t Counter;
    bool IsUnstable;
  };

  UnstableEdge UnstableCounters[kNumPCs];

  bool UseCounters = false;
  uint32_t UseValueProfileMask = false;
  bool DoPrintNewPCs = false;
  size_t NumPrintNewFuncs = 0;

  struct Module {
    uint32_t *Start, *Stop;
  };

  Module Modules[4096];
  size_t NumModules;  // linker-initialized.
  size_t NumGuards;   // linker-initialized.

  struct { uint8_t *Start, *Stop; } ModuleCounters[4096];
  size_t NumModulesWithInline8bitCounters;  // linker-initialized.
  size_t NumInline8bitCounters;

  struct PCTableEntry {
    uintptr_t PC, PCFlags;
  };

  struct { const PCTableEntry *Start, *Stop; } ModulePCTable[4096];
  size_t NumPCTables;
  size_t NumPCsInPCTables;

  uint8_t *Counters() const;
  uintptr_t *PCs() const;

  Set<uintptr_t> ObservedPCs;
  std::unordered_map<uintptr_t, uintptr_t> ObservedFuncs;  // PC => Counter.

  template <class Callback>
  void IterateInline8bitCounters(Callback CB) const;

  std::pair<size_t, size_t> FocusFunction = {-1, -1};  // Module and PC IDs.

  ValueBitMap ValueProfileMap;
  uintptr_t InitialStack;
};

template <class Callback>
// void Callback(size_t FirstFeature, size_t Idx, uint8_t Value);
ATTRIBUTE_NO_SANITIZE_ALL
void ForEachNonZeroByte(const uint8_t *Begin, const uint8_t *End,
                        size_t FirstFeature, Callback Handle8bitCounter) {
  typedef uintptr_t LargeType;
  const size_t Step = sizeof(LargeType) / sizeof(uint8_t);
  const size_t StepMask = Step - 1;
  auto P = Begin;
  // Iterate by 1 byte until either the alignment boundary or the end.
  for (; reinterpret_cast<uintptr_t>(P) & StepMask && P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);

  // Iterate by Step bytes at a time.
  for (; P < End; P += Step)
    if (LargeType Bundle = *reinterpret_cast<const LargeType *>(P))
      for (size_t I = 0; I < Step; I++, Bundle >>= 8)
        if (uint8_t V = Bundle & 0xff)
          Handle8bitCounter(FirstFeature, P - Begin + I, V);

  // Iterate by 1 byte until the end.
  for (; P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);
}
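
// Worked example derived from the code above: a non-zero byte with value 3 at
// offset 37 from Begin results in the call Handle8bitCounter(FirstFeature, 37, 3);
// zero bytes produce no callback.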

// Given a non-zero Counter returns a number in the range [0,7].
template<class T>
unsigned CounterToFeature(T Counter) {
  // Returns a feature number by placing Counters into buckets as illustrated
  // below.
  //
  // Counter bucket: [1] [2] [3] [4-7] [8-15] [16-31] [32-127] [128+]
  // Feature number:  0   1   2    3     4       5        6       7
  //
  // This is a heuristic taken from AFL (see
  // http://lcamtuf.coredump.cx/afl/technical_details.txt).
  //
  // This implementation may change in the future so clients should
  // not rely on it.
  assert(Counter);
  unsigned Bit = 0;
  /**/ if (Counter >= 128) Bit = 7;
  else if (Counter >= 32) Bit = 6;
  else if (Counter >= 16) Bit = 5;
  else if (Counter >= 8) Bit = 4;
  else if (Counter >= 4) Bit = 3;
  else if (Counter >= 3) Bit = 2;
  else if (Counter >= 2) Bit = 1;
  return Bit;
}
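
// For example, CounterToFeature(1) == 0, CounterToFeature(20) == 5 (bucket
// [16-31]), and CounterToFeature(255) == 7; combined with the "Idx * 8" term
// in CollectFeatures below, each counter byte owns eight feature slots.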

template <class Callback> // void Callback(size_t Feature)
ATTRIBUTE_NO_SANITIZE_ADDRESS
__attribute__((noinline))
void TracePC::CollectFeatures(Callback HandleFeature) const {
  uint8_t *Counters = this->Counters();
  size_t N = GetNumPCs();
  auto Handle8bitCounter = [&](size_t FirstFeature,
                               size_t Idx, uint8_t Counter) {
    if (UseCounters)
      HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Counter));
    else
      HandleFeature(FirstFeature + Idx);
  };

  size_t FirstFeature = 0;

  if (!NumInline8bitCounters) {
    ForEachNonZeroByte(Counters, Counters + N, FirstFeature, Handle8bitCounter);
    FirstFeature += N * 8;
  }

  if (NumInline8bitCounters) {
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      ForEachNonZeroByte(ModuleCounters[i].Start, ModuleCounters[i].Stop,
                         FirstFeature, Handle8bitCounter);
      FirstFeature += 8 * (ModuleCounters[i].Stop - ModuleCounters[i].Start);
    }
  }

  ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(), FirstFeature,
                     Handle8bitCounter);
  FirstFeature += (ExtraCountersEnd() - ExtraCountersBegin()) * 8;

  if (UseValueProfileMask) {
    ValueProfileMap.ForEach([&](size_t Idx) {
      HandleFeature(FirstFeature + Idx);
    });
    FirstFeature += ValueProfileMap.SizeInBits();
  }

  // Step function, grows similar to 8 * Log_2(A).
  auto StackDepthStepFunction = [](uint32_t A) -> uint32_t {
    if (!A) return A;
    uint32_t Log2 = Log(A);
    if (Log2 < 3) return A;
    Log2 -= 3;
    return (Log2 + 1) * 8 + ((A >> Log2) & 7);
  };
  assert(StackDepthStepFunction(1024) == 64);
  assert(StackDepthStepFunction(1024 * 4) == 80);
  assert(StackDepthStepFunction(1024 * 1024) == 144);

  if (auto MaxStackOffset = GetMaxStackOffset())
    HandleFeature(FirstFeature + StackDepthStepFunction(MaxStackOffset / 8));
}
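
// Illustrative example of the resulting feature numbering, derived from the
// code above (a sketch, not a stable contract): with UseCounters enabled, a
// counter byte at index Idx with value Counter yields the feature
// FirstFeature + Idx * 8 + CounterToFeature(Counter); e.g. Idx == 10 and
// Counter == 5 (bucket [4-7]) map to FirstFeature + 83.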

extern TracePC TPC;

}  // namespace fuzzer

#endif  // LLVM_FUZZER_TRACE_PC