//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

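// Returns the number of covered PCs: the size of ObservedPCs when it is
// populated, otherwise the number of non-zero entries in the PCs() array.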
size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}

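// Iterates over all inline 8-bit counters, calling CB(ModuleIdx, CounterIdx,
// GlobalCounterIdx) for each counter. Does nothing unless every PC-table
// entry has a matching counter.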
template<class CallBack>
void TracePC::IterateInline8bitCounters(CallBack CB) const {
  if (NumInline8bitCounters && NumInline8bitCounters == NumPCsInPCTables) {
    size_t CounterIdx = 0;
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      uint8_t *Beg = ModuleCounters[i].Start;
      size_t Size = ModuleCounters[i].Stop - Beg;
      assert(Size == (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
      for (size_t j = 0; j < Size; j++, CounterIdx++)
        CB(i, j, CounterIdx);
    }
  }
}

// Initializes unstable counters by copying Inline8bitCounters to unstable
// counters.
void TracePC::InitializeUnstableCounters() {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    UnstableCounters[UnstableIdx].Counter = ModuleCounters[i].Start[j];
  });
}

// Compares the current counters with counters from previous runs
// and records differences as unstable edges.
bool TracePC::UpdateUnstableCounters(int UnstableMode) {
  bool Updated = false;
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
      Updated = true;
      UnstableCounters[UnstableIdx].IsUnstable = true;
      if (UnstableMode == ZeroUnstable)
        UnstableCounters[UnstableIdx].Counter = 0;
      else if (UnstableMode == MinUnstable)
        UnstableCounters[UnstableIdx].Counter = std::min(
            ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
    }
  });
  return Updated;
}

// Updates and applies unstable counters to ModuleCounters in a single
// iteration.
void TracePC::UpdateAndApplyUnstableCounters(int UnstableMode) {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
      UnstableCounters[UnstableIdx].IsUnstable = true;
      if (UnstableMode == ZeroUnstable)
        ModuleCounters[i].Start[j] = 0;
      else if (UnstableMode == MinUnstable)
        ModuleCounters[i].Start[j] = std::min(
            ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
    }
  });
}

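// Registers a module's range of inline 8-bit counters.
// Called from __sanitizer_cov_8bit_counters_init.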
void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

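// Registers a module's PC table (the {PC, PCFlags} entries emitted by
// -fsanitize-coverage=pc-table). Called from __sanitizer_cov_pcs_init.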
void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

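// Registers a module's trace-pc-guard region and assigns each guard a unique
// non-zero index. Called from __sanitizer_cov_trace_pc_guard_init.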
void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          "         You may want to reduce the size of the binary\n"
          "         for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

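// Prints a summary of the instrumentation loaded so far and exits if the
// PC-table sizes are inconsistent with the number of guards or counters.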
void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

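// Records a (Caller, Callee) pair for an indirect call by packing the low
// 12 bits of each PC into one index and adding it to the value-profile map.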
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

/// \return the address of the previous instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
#if defined(__arm__)
  // T32 (Thumb) branch instructions might be 16 or 32 bit long,
  // so we return (pc-2) in that case in order to be safe.
  // For A32 mode we return (pc-4) because all instructions are 32 bit long.
  return (PC - 3) & (~1);
#elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
  // PCs are always 4 byte aligned.
  return PC - 4;
#elif defined(__sparc__) || defined(__mips__)
  return PC - 8;
#else
  return PC - 1;
#endif
}

/// \return the address of the next instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
#if defined(__mips__)
  return PC + 8;
#elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
    defined(__aarch64__)
  return PC + 4;
#else
  return PC + 1;
#endif
}

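// Updates the sets of observed PCs and observed functions from the current
// coverage counters, printing NEW_PC and NEW_FUNC lines when requested.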
void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", GetNextInstructionPc(PC));
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      IterateInline8bitCounters([&](int i, int j, int CounterIdx) {
        if (ModuleCounters[i].Start[j])
          Observe(ModulePCTable[i].Start[j]);
      });
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
    Printf("\n");
  }
}


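// Returns the path of the module containing PC, or "" if the sanitizer
// runtime cannot resolve it.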
static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

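// Iterates over all functions that have been observed at least once, calling
// CB(FirstEntry, OnePastLastEntry, HitCount) with each function's PC-table
// range and its observed hit count.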
template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      if (ObservedFuncs.count(FE->PC))
        CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}

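// Looks up FuncName among the instrumented functions and remembers its
// (module, index) position so ObservedFocusFunction() can later check
// whether it has executed.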
void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
  assert(FocusFunction.first > NumModulesWithInline8bitCounters);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModulesWithInline8bitCounters; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PCTE.Start[I].PCFlags & 1)) continue;  // not a function entry.
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunction = {M, I};
      return;
    }
  }
}

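// Returns true if the focus function's inline 8-bit counter is non-zero,
// i.e. the focus function was executed during the last run.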
bool TracePC::ObservedFocusFunction() {
  size_t I = FocusFunction.first;
  size_t J = FocusFunction.second;
  if (I >= NumModulesWithInline8bitCounters)
    return false;
  auto &MC = ModuleCounters[I];
  size_t Size = MC.Stop - MC.Start;
  if (J >= Size)
    return false;
  return MC.Start[J] != 0;
}

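// Prints per-function coverage (COVERED_FUNC / UNCOVERED_PC lines), using the
// sanitizer symbolizer to describe each PC.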
void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t Line = std::stoul(LineStr);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("COVERED_FUNC: hits: %zd", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(), Line);
    for (auto PC: UncoveredPCs)
      Printf("  UNCOVERED_PC: %s\n",
             DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}

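// Passes the recorded PCs (adjusted to the previous instruction) to
// __sanitizer_dump_coverage, if that function is available.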
void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    Vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

void TracePC::PrintUnstableStats() {
  size_t count = 0;
  Printf("UNSTABLE_FUNCTIONS:\n");
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    const PCTableEntry &TE = ModulePCTable[i].Start[j];
    if (UnstableCounters[UnstableIdx].IsUnstable) {
      count++;
      if (ObservedFuncs.count(TE.PC)) {
        auto VisualizePC = GetNextInstructionPc(TE.PC);
        std::string FunctionStr = DescribePC("%F", VisualizePC);
        if (FunctionStr.find("in ") == 0)
          FunctionStr = FunctionStr.substr(3);
        Printf("%s\n", FunctionStr.c_str());
      }
    }
  });

  Printf("stat::stability_rate: %.2f\n",
         100 - static_cast<float>(count * 100) / NumInline8bitCounters);
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as a new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is a XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

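// Records the outcome of a comparison: the argument pair is inserted into the
// table of recent compares (TORC4/TORC8), and two features derived from the
// XOR and the difference of the arguments are mixed with the PC and added to
// the value-profile map.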
template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  uint64_t HammingDistance = __builtin_popcountll(ArgXor);  // [0,64]
  uint64_t AbsoluteDistance =
      (Arg1 == Arg2 ? 0 : __builtin_clzll(Arg1 - Arg2) + 1);
  ValueProfileMap.AddValue(PC * 128 + HammingDistance);
  ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
}

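// Computes the length of S, looking at no more than MaxLen characters.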
static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually not be zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

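// Records the address of a local variable as the initial stack pointer;
// GetMaxStackOffset() reports how far below it the stack has grown.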
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}

}  // namespace fuzzer

extern "C" {
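// Callback for -fsanitize-coverage=trace-pc-guard: called on every
// instrumented edge; records the PC and increments the edge's 8-bit counter.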
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

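// Instrumentation callback for switch statements. Cases[0] is the number of
// case values, Cases[1] is their width in bits, and the case values follow.
// Val is compared against the case values, and the XOR of Val with the first
// case value that exceeds it (or with the last one) is passed to HandleCmp.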
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

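// Weak hooks invoked by the sanitizer interceptors for memcmp/str*/memmem.
// They record comparison operands (or the needle being searched for) for the
// mutator, but only while the user callback is running.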
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"