//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//
14*a7c257b0Skamil
#include "FuzzerTracePC.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <algorithm>
#include <cstring>
#include <set>
#include <string>
24*a7c257b0Skamil
25*a7c257b0Skamil // The coverage counters and PCs.
26*a7c257b0Skamil // These are declared as global variables named "__sancov_*" to simplify
27*a7c257b0Skamil // experiments with inlined instrumentation.
28*a7c257b0Skamil alignas(64) ATTRIBUTE_INTERFACE
29*a7c257b0Skamil uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];
30*a7c257b0Skamil
31*a7c257b0Skamil ATTRIBUTE_INTERFACE
32*a7c257b0Skamil uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];
33*a7c257b0Skamil
34*a7c257b0Skamil // Used by -fsanitize-coverage=stack-depth to track stack depth
35*a7c257b0Skamil ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;
36*a7c257b0Skamil
37*a7c257b0Skamil namespace fuzzer {
38*a7c257b0Skamil
39*a7c257b0Skamil TracePC TPC;
40*a7c257b0Skamil
Counters() const41*a7c257b0Skamil uint8_t *TracePC::Counters() const {
42*a7c257b0Skamil return __sancov_trace_pc_guard_8bit_counters;
43*a7c257b0Skamil }
44*a7c257b0Skamil
PCs() const45*a7c257b0Skamil uintptr_t *TracePC::PCs() const {
46*a7c257b0Skamil return __sancov_trace_pc_pcs;
47*a7c257b0Skamil }
48*a7c257b0Skamil
GetTotalPCCoverage()49*a7c257b0Skamil size_t TracePC::GetTotalPCCoverage() {
50*a7c257b0Skamil if (ObservedPCs.size())
51*a7c257b0Skamil return ObservedPCs.size();
52*a7c257b0Skamil size_t Res = 0;
53*a7c257b0Skamil for (size_t i = 1, N = GetNumPCs(); i < N; i++)
54*a7c257b0Skamil if (PCs()[i])
55*a7c257b0Skamil Res++;
56*a7c257b0Skamil return Res;
57*a7c257b0Skamil }
58*a7c257b0Skamil
59*a7c257b0Skamil template<class CallBack>
IterateInline8bitCounters(CallBack CB) const60*a7c257b0Skamil void TracePC::IterateInline8bitCounters(CallBack CB) const {
61*a7c257b0Skamil if (NumInline8bitCounters && NumInline8bitCounters == NumPCsInPCTables) {
62*a7c257b0Skamil size_t CounterIdx = 0;
63*a7c257b0Skamil for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
64*a7c257b0Skamil uint8_t *Beg = ModuleCounters[i].Start;
65*a7c257b0Skamil size_t Size = ModuleCounters[i].Stop - Beg;
66*a7c257b0Skamil assert(Size == (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
67*a7c257b0Skamil for (size_t j = 0; j < Size; j++, CounterIdx++)
68*a7c257b0Skamil CB(i, j, CounterIdx);
69*a7c257b0Skamil }
70*a7c257b0Skamil }
71*a7c257b0Skamil }
72*a7c257b0Skamil
73*a7c257b0Skamil // Initializes unstable counters by copying Inline8bitCounters to unstable
74*a7c257b0Skamil // counters.
InitializeUnstableCounters()75*a7c257b0Skamil void TracePC::InitializeUnstableCounters() {
76*a7c257b0Skamil IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
77*a7c257b0Skamil UnstableCounters[UnstableIdx].Counter = ModuleCounters[i].Start[j];
78*a7c257b0Skamil });
79*a7c257b0Skamil }
80*a7c257b0Skamil
81*a7c257b0Skamil // Compares the current counters with counters from previous runs
82*a7c257b0Skamil // and records differences as unstable edges.
UpdateUnstableCounters(int UnstableMode)83*a7c257b0Skamil bool TracePC::UpdateUnstableCounters(int UnstableMode) {
84*a7c257b0Skamil bool Updated = false;
85*a7c257b0Skamil IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
86*a7c257b0Skamil if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
87*a7c257b0Skamil Updated = true;
88*a7c257b0Skamil UnstableCounters[UnstableIdx].IsUnstable = true;
89*a7c257b0Skamil if (UnstableMode == ZeroUnstable)
90*a7c257b0Skamil UnstableCounters[UnstableIdx].Counter = 0;
91*a7c257b0Skamil else if (UnstableMode == MinUnstable)
92*a7c257b0Skamil UnstableCounters[UnstableIdx].Counter = std::min(
93*a7c257b0Skamil ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
94*a7c257b0Skamil }
95*a7c257b0Skamil });
96*a7c257b0Skamil return Updated;
97*a7c257b0Skamil }
98*a7c257b0Skamil
99*a7c257b0Skamil // Updates and applies unstable counters to ModuleCounters in single iteration
UpdateAndApplyUnstableCounters(int UnstableMode)100*a7c257b0Skamil void TracePC::UpdateAndApplyUnstableCounters(int UnstableMode) {
101*a7c257b0Skamil IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
102*a7c257b0Skamil if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
103*a7c257b0Skamil UnstableCounters[UnstableIdx].IsUnstable = true;
104*a7c257b0Skamil if (UnstableMode == ZeroUnstable)
105*a7c257b0Skamil ModuleCounters[i].Start[j] = 0;
106*a7c257b0Skamil else if (UnstableMode == MinUnstable)
107*a7c257b0Skamil ModuleCounters[i].Start[j] = std::min(
108*a7c257b0Skamil ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
109*a7c257b0Skamil }
110*a7c257b0Skamil });
111*a7c257b0Skamil }
112*a7c257b0Skamil
HandleInline8bitCountersInit(uint8_t * Start,uint8_t * Stop)113*a7c257b0Skamil void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
114*a7c257b0Skamil if (Start == Stop) return;
115*a7c257b0Skamil if (NumModulesWithInline8bitCounters &&
116*a7c257b0Skamil ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
117*a7c257b0Skamil assert(NumModulesWithInline8bitCounters <
118*a7c257b0Skamil sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
119*a7c257b0Skamil ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
120*a7c257b0Skamil NumInline8bitCounters += Stop - Start;
121*a7c257b0Skamil }
122*a7c257b0Skamil
HandlePCsInit(const uintptr_t * Start,const uintptr_t * Stop)123*a7c257b0Skamil void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
124*a7c257b0Skamil const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
125*a7c257b0Skamil const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
126*a7c257b0Skamil if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
127*a7c257b0Skamil assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
128*a7c257b0Skamil ModulePCTable[NumPCTables++] = {B, E};
129*a7c257b0Skamil NumPCsInPCTables += E - B;
130*a7c257b0Skamil }
131*a7c257b0Skamil
HandleInit(uint32_t * Start,uint32_t * Stop)132*a7c257b0Skamil void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
133*a7c257b0Skamil if (Start == Stop || *Start) return;
134*a7c257b0Skamil assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
135*a7c257b0Skamil for (uint32_t *P = Start; P < Stop; P++) {
136*a7c257b0Skamil NumGuards++;
137*a7c257b0Skamil if (NumGuards == kNumPCs) {
138*a7c257b0Skamil RawPrint(
139*a7c257b0Skamil "WARNING: The binary has too many instrumented PCs.\n"
140*a7c257b0Skamil " You may want to reduce the size of the binary\n"
141*a7c257b0Skamil " for more efficient fuzzing and precise coverage data\n");
142*a7c257b0Skamil }
143*a7c257b0Skamil *P = NumGuards % kNumPCs;
144*a7c257b0Skamil }
145*a7c257b0Skamil Modules[NumModules].Start = Start;
146*a7c257b0Skamil Modules[NumModules].Stop = Stop;
147*a7c257b0Skamil NumModules++;
148*a7c257b0Skamil }
149*a7c257b0Skamil
PrintModuleInfo()150*a7c257b0Skamil void TracePC::PrintModuleInfo() {
151*a7c257b0Skamil if (NumGuards) {
152*a7c257b0Skamil Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
153*a7c257b0Skamil for (size_t i = 0; i < NumModules; i++)
154*a7c257b0Skamil Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
155*a7c257b0Skamil Modules[i].Start, Modules[i].Stop);
156*a7c257b0Skamil Printf("\n");
157*a7c257b0Skamil }
158*a7c257b0Skamil if (NumModulesWithInline8bitCounters) {
159*a7c257b0Skamil Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
160*a7c257b0Skamil NumModulesWithInline8bitCounters, NumInline8bitCounters);
161*a7c257b0Skamil for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
162*a7c257b0Skamil Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
163*a7c257b0Skamil ModuleCounters[i].Start, ModuleCounters[i].Stop);
164*a7c257b0Skamil Printf("\n");
165*a7c257b0Skamil }
166*a7c257b0Skamil if (NumPCTables) {
167*a7c257b0Skamil Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
168*a7c257b0Skamil NumPCsInPCTables);
169*a7c257b0Skamil for (size_t i = 0; i < NumPCTables; i++) {
170*a7c257b0Skamil Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
171*a7c257b0Skamil ModulePCTable[i].Start, ModulePCTable[i].Stop);
172*a7c257b0Skamil }
173*a7c257b0Skamil Printf("\n");
174*a7c257b0Skamil
175*a7c257b0Skamil if ((NumGuards && NumGuards != NumPCsInPCTables) ||
176*a7c257b0Skamil (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
177*a7c257b0Skamil Printf("ERROR: The size of coverage PC tables does not match the\n"
178*a7c257b0Skamil "number of instrumented PCs. This might be a compiler bug,\n"
179*a7c257b0Skamil "please contact the libFuzzer developers.\n"
180*a7c257b0Skamil "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
181*a7c257b0Skamil "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
182*a7c257b0Skamil _Exit(1);
183*a7c257b0Skamil }
184*a7c257b0Skamil }
185*a7c257b0Skamil if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
186*a7c257b0Skamil Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
187*a7c257b0Skamil }
188*a7c257b0Skamil
189*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
HandleCallerCallee(uintptr_t Caller,uintptr_t Callee)190*a7c257b0Skamil void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
191*a7c257b0Skamil const uintptr_t kBits = 12;
192*a7c257b0Skamil const uintptr_t kMask = (1 << kBits) - 1;
193*a7c257b0Skamil uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
194*a7c257b0Skamil ValueProfileMap.AddValueModPrime(Idx);
195*a7c257b0Skamil }
196*a7c257b0Skamil
197*a7c257b0Skamil /// \return the address of the previous instruction.
198*a7c257b0Skamil /// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
GetPreviousInstructionPc(uintptr_t PC)199*a7c257b0Skamil inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
200*a7c257b0Skamil #if defined(__arm__)
201*a7c257b0Skamil // T32 (Thumb) branch instructions might be 16 or 32 bit long,
202*a7c257b0Skamil // so we return (pc-2) in that case in order to be safe.
203*a7c257b0Skamil // For A32 mode we return (pc-4) because all instructions are 32 bit long.
204*a7c257b0Skamil return (PC - 3) & (~1);
205*a7c257b0Skamil #elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
206*a7c257b0Skamil // PCs are always 4 byte aligned.
207*a7c257b0Skamil return PC - 4;
208*a7c257b0Skamil #elif defined(__sparc__) || defined(__mips__)
209*a7c257b0Skamil return PC - 8;
210*a7c257b0Skamil #else
211*a7c257b0Skamil return PC - 1;
212*a7c257b0Skamil #endif
213*a7c257b0Skamil }
214*a7c257b0Skamil
215*a7c257b0Skamil /// \return the address of the next instruction.
216*a7c257b0Skamil /// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
GetNextInstructionPc(uintptr_t PC)217*a7c257b0Skamil inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
218*a7c257b0Skamil #if defined(__mips__)
219*a7c257b0Skamil return PC + 8;
220*a7c257b0Skamil #elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
221*a7c257b0Skamil defined(__aarch64__)
222*a7c257b0Skamil return PC + 4;
223*a7c257b0Skamil #else
224*a7c257b0Skamil return PC + 1;
225*a7c257b0Skamil #endif
226*a7c257b0Skamil }
227*a7c257b0Skamil
UpdateObservedPCs()228*a7c257b0Skamil void TracePC::UpdateObservedPCs() {
229*a7c257b0Skamil Vector<uintptr_t> CoveredFuncs;
230*a7c257b0Skamil auto ObservePC = [&](uintptr_t PC) {
231*a7c257b0Skamil if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
232*a7c257b0Skamil PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", GetNextInstructionPc(PC));
233*a7c257b0Skamil Printf("\n");
234*a7c257b0Skamil }
235*a7c257b0Skamil };
236*a7c257b0Skamil
237*a7c257b0Skamil auto Observe = [&](const PCTableEntry &TE) {
238*a7c257b0Skamil if (TE.PCFlags & 1)
239*a7c257b0Skamil if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
240*a7c257b0Skamil CoveredFuncs.push_back(TE.PC);
241*a7c257b0Skamil ObservePC(TE.PC);
242*a7c257b0Skamil };
243*a7c257b0Skamil
244*a7c257b0Skamil if (NumPCsInPCTables) {
245*a7c257b0Skamil if (NumInline8bitCounters == NumPCsInPCTables) {
246*a7c257b0Skamil IterateInline8bitCounters([&](int i, int j, int CounterIdx) {
247*a7c257b0Skamil if (ModuleCounters[i].Start[j])
248*a7c257b0Skamil Observe(ModulePCTable[i].Start[j]);
249*a7c257b0Skamil });
250*a7c257b0Skamil } else if (NumGuards == NumPCsInPCTables) {
251*a7c257b0Skamil size_t GuardIdx = 1;
252*a7c257b0Skamil for (size_t i = 0; i < NumModules; i++) {
253*a7c257b0Skamil uint32_t *Beg = Modules[i].Start;
254*a7c257b0Skamil size_t Size = Modules[i].Stop - Beg;
255*a7c257b0Skamil assert(Size ==
256*a7c257b0Skamil (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
257*a7c257b0Skamil for (size_t j = 0; j < Size; j++, GuardIdx++)
258*a7c257b0Skamil if (Counters()[GuardIdx])
259*a7c257b0Skamil Observe(ModulePCTable[i].Start[j]);
260*a7c257b0Skamil }
261*a7c257b0Skamil }
262*a7c257b0Skamil }
263*a7c257b0Skamil
264*a7c257b0Skamil for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
265*a7c257b0Skamil i++) {
266*a7c257b0Skamil Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
267*a7c257b0Skamil PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
268*a7c257b0Skamil Printf("\n");
269*a7c257b0Skamil }
270*a7c257b0Skamil }
271*a7c257b0Skamil
272*a7c257b0Skamil
GetModuleName(uintptr_t PC)273*a7c257b0Skamil static std::string GetModuleName(uintptr_t PC) {
274*a7c257b0Skamil char ModulePathRaw[4096] = ""; // What's PATH_MAX in portable C++?
275*a7c257b0Skamil void *OffsetRaw = nullptr;
276*a7c257b0Skamil if (!EF->__sanitizer_get_module_and_offset_for_pc(
277*a7c257b0Skamil reinterpret_cast<void *>(PC), ModulePathRaw,
278*a7c257b0Skamil sizeof(ModulePathRaw), &OffsetRaw))
279*a7c257b0Skamil return "";
280*a7c257b0Skamil return ModulePathRaw;
281*a7c257b0Skamil }
282*a7c257b0Skamil
283*a7c257b0Skamil template<class CallBack>
IterateCoveredFunctions(CallBack CB)284*a7c257b0Skamil void TracePC::IterateCoveredFunctions(CallBack CB) {
285*a7c257b0Skamil for (size_t i = 0; i < NumPCTables; i++) {
286*a7c257b0Skamil auto &M = ModulePCTable[i];
287*a7c257b0Skamil assert(M.Start < M.Stop);
288*a7c257b0Skamil auto ModuleName = GetModuleName(M.Start->PC);
289*a7c257b0Skamil for (auto NextFE = M.Start; NextFE < M.Stop; ) {
290*a7c257b0Skamil auto FE = NextFE;
291*a7c257b0Skamil assert((FE->PCFlags & 1) && "Not a function entry point");
292*a7c257b0Skamil do {
293*a7c257b0Skamil NextFE++;
294*a7c257b0Skamil } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
295*a7c257b0Skamil if (ObservedFuncs.count(FE->PC))
296*a7c257b0Skamil CB(FE, NextFE, ObservedFuncs[FE->PC]);
297*a7c257b0Skamil }
298*a7c257b0Skamil }
299*a7c257b0Skamil }
300*a7c257b0Skamil
SetFocusFunction(const std::string & FuncName)301*a7c257b0Skamil void TracePC::SetFocusFunction(const std::string &FuncName) {
302*a7c257b0Skamil // This function should be called once.
303*a7c257b0Skamil assert(FocusFunction.first > NumModulesWithInline8bitCounters);
304*a7c257b0Skamil if (FuncName.empty())
305*a7c257b0Skamil return;
306*a7c257b0Skamil for (size_t M = 0; M < NumModulesWithInline8bitCounters; M++) {
307*a7c257b0Skamil auto &PCTE = ModulePCTable[M];
308*a7c257b0Skamil size_t N = PCTE.Stop - PCTE.Start;
309*a7c257b0Skamil for (size_t I = 0; I < N; I++) {
310*a7c257b0Skamil if (!(PCTE.Start[I].PCFlags & 1)) continue; // not a function entry.
311*a7c257b0Skamil auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
312*a7c257b0Skamil if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
313*a7c257b0Skamil Name = Name.substr(3, std::string::npos);
314*a7c257b0Skamil if (FuncName != Name) continue;
315*a7c257b0Skamil Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
316*a7c257b0Skamil FocusFunction = {M, I};
317*a7c257b0Skamil return;
318*a7c257b0Skamil }
319*a7c257b0Skamil }
320*a7c257b0Skamil }
321*a7c257b0Skamil
ObservedFocusFunction()322*a7c257b0Skamil bool TracePC::ObservedFocusFunction() {
323*a7c257b0Skamil size_t I = FocusFunction.first;
324*a7c257b0Skamil size_t J = FocusFunction.second;
325*a7c257b0Skamil if (I >= NumModulesWithInline8bitCounters)
326*a7c257b0Skamil return false;
327*a7c257b0Skamil auto &MC = ModuleCounters[I];
328*a7c257b0Skamil size_t Size = MC.Stop - MC.Start;
329*a7c257b0Skamil if (J >= Size)
330*a7c257b0Skamil return false;
331*a7c257b0Skamil return MC.Start[J] != 0;
332*a7c257b0Skamil }
333*a7c257b0Skamil
PrintCoverage()334*a7c257b0Skamil void TracePC::PrintCoverage() {
335*a7c257b0Skamil if (!EF->__sanitizer_symbolize_pc ||
336*a7c257b0Skamil !EF->__sanitizer_get_module_and_offset_for_pc) {
337*a7c257b0Skamil Printf("INFO: __sanitizer_symbolize_pc or "
338*a7c257b0Skamil "__sanitizer_get_module_and_offset_for_pc is not available,"
339*a7c257b0Skamil " not printing coverage\n");
340*a7c257b0Skamil return;
341*a7c257b0Skamil }
342*a7c257b0Skamil Printf("COVERAGE:\n");
343*a7c257b0Skamil auto CoveredFunctionCallback = [&](const PCTableEntry *First,
344*a7c257b0Skamil const PCTableEntry *Last,
345*a7c257b0Skamil uintptr_t Counter) {
346*a7c257b0Skamil assert(First < Last);
347*a7c257b0Skamil auto VisualizePC = GetNextInstructionPc(First->PC);
348*a7c257b0Skamil std::string FileStr = DescribePC("%s", VisualizePC);
349*a7c257b0Skamil if (!IsInterestingCoverageFile(FileStr))
350*a7c257b0Skamil return;
351*a7c257b0Skamil std::string FunctionStr = DescribePC("%F", VisualizePC);
352*a7c257b0Skamil if (FunctionStr.find("in ") == 0)
353*a7c257b0Skamil FunctionStr = FunctionStr.substr(3);
354*a7c257b0Skamil std::string LineStr = DescribePC("%l", VisualizePC);
355*a7c257b0Skamil size_t Line = std::stoul(LineStr);
356*a7c257b0Skamil size_t NumEdges = Last - First;
357*a7c257b0Skamil Vector<uintptr_t> UncoveredPCs;
358*a7c257b0Skamil for (auto TE = First; TE < Last; TE++)
359*a7c257b0Skamil if (!ObservedPCs.count(TE->PC))
360*a7c257b0Skamil UncoveredPCs.push_back(TE->PC);
361*a7c257b0Skamil Printf("COVERED_FUNC: hits: %zd", Counter);
362*a7c257b0Skamil Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
363*a7c257b0Skamil Printf(" %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(), Line);
364*a7c257b0Skamil for (auto PC: UncoveredPCs)
365*a7c257b0Skamil Printf(" UNCOVERED_PC: %s\n",
366*a7c257b0Skamil DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
367*a7c257b0Skamil };
368*a7c257b0Skamil
369*a7c257b0Skamil IterateCoveredFunctions(CoveredFunctionCallback);
370*a7c257b0Skamil }
371*a7c257b0Skamil
DumpCoverage()372*a7c257b0Skamil void TracePC::DumpCoverage() {
373*a7c257b0Skamil if (EF->__sanitizer_dump_coverage) {
374*a7c257b0Skamil Vector<uintptr_t> PCsCopy(GetNumPCs());
375*a7c257b0Skamil for (size_t i = 0; i < GetNumPCs(); i++)
376*a7c257b0Skamil PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
377*a7c257b0Skamil EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
378*a7c257b0Skamil }
379*a7c257b0Skamil }
380*a7c257b0Skamil
PrintUnstableStats()381*a7c257b0Skamil void TracePC::PrintUnstableStats() {
382*a7c257b0Skamil size_t count = 0;
383*a7c257b0Skamil Printf("UNSTABLE_FUNCTIONS:\n");
384*a7c257b0Skamil IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
385*a7c257b0Skamil const PCTableEntry &TE = ModulePCTable[i].Start[j];
386*a7c257b0Skamil if (UnstableCounters[UnstableIdx].IsUnstable) {
387*a7c257b0Skamil count++;
388*a7c257b0Skamil if (ObservedFuncs.count(TE.PC)) {
389*a7c257b0Skamil auto VisualizePC = GetNextInstructionPc(TE.PC);
390*a7c257b0Skamil std::string FunctionStr = DescribePC("%F", VisualizePC);
391*a7c257b0Skamil if (FunctionStr.find("in ") == 0)
392*a7c257b0Skamil FunctionStr = FunctionStr.substr(3);
393*a7c257b0Skamil Printf("%s\n", FunctionStr.c_str());
394*a7c257b0Skamil }
395*a7c257b0Skamil }
396*a7c257b0Skamil });
397*a7c257b0Skamil
398*a7c257b0Skamil Printf("stat::stability_rate: %.2f\n",
399*a7c257b0Skamil 100 - static_cast<float>(count * 100) / NumInline8bitCounters);
400*a7c257b0Skamil }
401*a7c257b0Skamil
// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as a new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is a XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.

412*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
AddValueForMemcmp(void * caller_pc,const void * s1,const void * s2,size_t n,bool StopAtZero)413*a7c257b0Skamil void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
414*a7c257b0Skamil size_t n, bool StopAtZero) {
415*a7c257b0Skamil if (!n) return;
416*a7c257b0Skamil size_t Len = std::min(n, Word::GetMaxSize());
417*a7c257b0Skamil const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
418*a7c257b0Skamil const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
419*a7c257b0Skamil uint8_t B1[Word::kMaxSize];
420*a7c257b0Skamil uint8_t B2[Word::kMaxSize];
421*a7c257b0Skamil // Copy the data into locals in this non-msan-instrumented function
422*a7c257b0Skamil // to avoid msan complaining further.
423*a7c257b0Skamil size_t Hash = 0; // Compute some simple hash of both strings.
424*a7c257b0Skamil for (size_t i = 0; i < Len; i++) {
425*a7c257b0Skamil B1[i] = A1[i];
426*a7c257b0Skamil B2[i] = A2[i];
427*a7c257b0Skamil size_t T = B1[i];
428*a7c257b0Skamil Hash ^= (T << 8) | B2[i];
429*a7c257b0Skamil }
430*a7c257b0Skamil size_t I = 0;
431*a7c257b0Skamil for (; I < Len; I++)
432*a7c257b0Skamil if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
433*a7c257b0Skamil break;
434*a7c257b0Skamil size_t PC = reinterpret_cast<size_t>(caller_pc);
435*a7c257b0Skamil size_t Idx = (PC & 4095) | (I << 12);
436*a7c257b0Skamil ValueProfileMap.AddValue(Idx);
437*a7c257b0Skamil TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
438*a7c257b0Skamil }
439*a7c257b0Skamil
440*a7c257b0Skamil template <class T>
441*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
442*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
HandleCmp(uintptr_t PC,T Arg1,T Arg2)443*a7c257b0Skamil void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
444*a7c257b0Skamil uint64_t ArgXor = Arg1 ^ Arg2;
445*a7c257b0Skamil if (sizeof(T) == 4)
446*a7c257b0Skamil TORC4.Insert(ArgXor, Arg1, Arg2);
447*a7c257b0Skamil else if (sizeof(T) == 8)
448*a7c257b0Skamil TORC8.Insert(ArgXor, Arg1, Arg2);
449*a7c257b0Skamil uint64_t HammingDistance = __builtin_popcountll(ArgXor); // [0,64]
450*a7c257b0Skamil uint64_t AbsoluteDistance =
451*a7c257b0Skamil (Arg1 == Arg2 ? 0 : __builtin_clzll(Arg1 - Arg2) + 1);
452*a7c257b0Skamil ValueProfileMap.AddValue(PC * 128 + HammingDistance);
453*a7c257b0Skamil ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
454*a7c257b0Skamil }
455*a7c257b0Skamil
// Returns strlen(S) capped at MaxLen, without reading past MaxLen bytes.
static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}
461*a7c257b0Skamil
// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually be non-zero terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}
469*a7c257b0Skamil
ClearInlineCounters()470*a7c257b0Skamil void TracePC::ClearInlineCounters() {
471*a7c257b0Skamil for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
472*a7c257b0Skamil uint8_t *Beg = ModuleCounters[i].Start;
473*a7c257b0Skamil size_t Size = ModuleCounters[i].Stop - Beg;
474*a7c257b0Skamil memset(Beg, 0, Size);
475*a7c257b0Skamil }
476*a7c257b0Skamil }
477*a7c257b0Skamil
478*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
RecordInitialStack()479*a7c257b0Skamil void TracePC::RecordInitialStack() {
480*a7c257b0Skamil int stack;
481*a7c257b0Skamil __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
482*a7c257b0Skamil }
483*a7c257b0Skamil
GetMaxStackOffset() const484*a7c257b0Skamil uintptr_t TracePC::GetMaxStackOffset() const {
485*a7c257b0Skamil return InitialStack - __sancov_lowest_stack; // Stack grows down
486*a7c257b0Skamil }
487*a7c257b0Skamil
488*a7c257b0Skamil } // namespace fuzzer
489*a7c257b0Skamil
490*a7c257b0Skamil extern "C" {
491*a7c257b0Skamil ATTRIBUTE_INTERFACE
492*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
__sanitizer_cov_trace_pc_guard(uint32_t * Guard)493*a7c257b0Skamil void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
494*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
495*a7c257b0Skamil uint32_t Idx = *Guard;
496*a7c257b0Skamil __sancov_trace_pc_pcs[Idx] = PC;
497*a7c257b0Skamil __sancov_trace_pc_guard_8bit_counters[Idx]++;
498*a7c257b0Skamil }
499*a7c257b0Skamil
500*a7c257b0Skamil // Best-effort support for -fsanitize-coverage=trace-pc, which is available
501*a7c257b0Skamil // in both Clang and GCC.
502*a7c257b0Skamil ATTRIBUTE_INTERFACE
503*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
__sanitizer_cov_trace_pc()504*a7c257b0Skamil void __sanitizer_cov_trace_pc() {
505*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
506*a7c257b0Skamil uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
507*a7c257b0Skamil __sancov_trace_pc_pcs[Idx] = PC;
508*a7c257b0Skamil __sancov_trace_pc_guard_8bit_counters[Idx]++;
509*a7c257b0Skamil }
510*a7c257b0Skamil
511*a7c257b0Skamil ATTRIBUTE_INTERFACE
__sanitizer_cov_trace_pc_guard_init(uint32_t * Start,uint32_t * Stop)512*a7c257b0Skamil void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
513*a7c257b0Skamil fuzzer::TPC.HandleInit(Start, Stop);
514*a7c257b0Skamil }
515*a7c257b0Skamil
516*a7c257b0Skamil ATTRIBUTE_INTERFACE
__sanitizer_cov_8bit_counters_init(uint8_t * Start,uint8_t * Stop)517*a7c257b0Skamil void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
518*a7c257b0Skamil fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
519*a7c257b0Skamil }
520*a7c257b0Skamil
521*a7c257b0Skamil ATTRIBUTE_INTERFACE
__sanitizer_cov_pcs_init(const uintptr_t * pcs_beg,const uintptr_t * pcs_end)522*a7c257b0Skamil void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
523*a7c257b0Skamil const uintptr_t *pcs_end) {
524*a7c257b0Skamil fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
525*a7c257b0Skamil }
526*a7c257b0Skamil
527*a7c257b0Skamil ATTRIBUTE_INTERFACE
528*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
__sanitizer_cov_trace_pc_indir(uintptr_t Callee)529*a7c257b0Skamil void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
530*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
531*a7c257b0Skamil fuzzer::TPC.HandleCallerCallee(PC, Callee);
532*a7c257b0Skamil }
533*a7c257b0Skamil
534*a7c257b0Skamil ATTRIBUTE_INTERFACE
535*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
536*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_cmp8(uint64_t Arg1,uint64_t Arg2)537*a7c257b0Skamil void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
538*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
539*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
540*a7c257b0Skamil }
541*a7c257b0Skamil
542*a7c257b0Skamil ATTRIBUTE_INTERFACE
543*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
544*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
545*a7c257b0Skamil // Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
546*a7c257b0Skamil // the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
547*a7c257b0Skamil // should be changed later to make full use of instrumentation.
__sanitizer_cov_trace_const_cmp8(uint64_t Arg1,uint64_t Arg2)548*a7c257b0Skamil void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
549*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
550*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
551*a7c257b0Skamil }
552*a7c257b0Skamil
553*a7c257b0Skamil ATTRIBUTE_INTERFACE
554*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
555*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_cmp4(uint32_t Arg1,uint32_t Arg2)556*a7c257b0Skamil void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
557*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
558*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
559*a7c257b0Skamil }
560*a7c257b0Skamil
561*a7c257b0Skamil ATTRIBUTE_INTERFACE
562*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
563*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_const_cmp4(uint32_t Arg1,uint32_t Arg2)564*a7c257b0Skamil void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
565*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
566*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
567*a7c257b0Skamil }
568*a7c257b0Skamil
569*a7c257b0Skamil ATTRIBUTE_INTERFACE
570*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
571*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_cmp2(uint16_t Arg1,uint16_t Arg2)572*a7c257b0Skamil void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
573*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
574*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
575*a7c257b0Skamil }
576*a7c257b0Skamil
577*a7c257b0Skamil ATTRIBUTE_INTERFACE
578*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
579*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_const_cmp2(uint16_t Arg1,uint16_t Arg2)580*a7c257b0Skamil void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
581*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
582*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
583*a7c257b0Skamil }
584*a7c257b0Skamil
585*a7c257b0Skamil ATTRIBUTE_INTERFACE
586*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
587*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_cmp1(uint8_t Arg1,uint8_t Arg2)588*a7c257b0Skamil void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
589*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
590*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
591*a7c257b0Skamil }
592*a7c257b0Skamil
593*a7c257b0Skamil ATTRIBUTE_INTERFACE
594*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
595*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_const_cmp1(uint8_t Arg1,uint8_t Arg2)596*a7c257b0Skamil void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
597*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
598*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
599*a7c257b0Skamil }
600*a7c257b0Skamil
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Instrumentation callback for `switch` statements.
// Cases layout (from the instrumentation pass): Cases[0] is the number of
// case values N, Cases[1] is the bit width of the switch operand, and
// Cases[2 .. 2+N) are the case values themselves. The Vals[N-1] early-exit
// below relies on the largest value being last — i.e. the values are
// presumably emitted sorted ascending; TODO(review) confirm against the
// SanitizerCoverage lowering.
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case: both the incoming value
  // and every case label are small (e.g. ASCII), so cmp tracing would only
  // add noise.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  size_t i;
  uint64_t Token = 0;
  // Scan for the first case value >= Val. Token ends up as the XOR
  // difference between Val and that nearest candidate (or the last case if
  // Val exceeds them all), which HandleCmp uses to guide mutations.
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  // PC + i gives each case position its own synthetic PC so distinct case
  // edges are tracked separately; dispatch on operand width to match the
  // typed HandleCmp overloads.
  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}
627*a7c257b0Skamil
628*a7c257b0Skamil ATTRIBUTE_INTERFACE
629*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
630*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_div4(uint32_t Val)631*a7c257b0Skamil void __sanitizer_cov_trace_div4(uint32_t Val) {
632*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
633*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
634*a7c257b0Skamil }
635*a7c257b0Skamil
636*a7c257b0Skamil ATTRIBUTE_INTERFACE
637*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
638*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_div8(uint64_t Val)639*a7c257b0Skamil void __sanitizer_cov_trace_div8(uint64_t Val) {
640*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
641*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
642*a7c257b0Skamil }
643*a7c257b0Skamil
644*a7c257b0Skamil ATTRIBUTE_INTERFACE
645*a7c257b0Skamil ATTRIBUTE_NO_SANITIZE_ALL
646*a7c257b0Skamil ATTRIBUTE_TARGET_POPCNT
__sanitizer_cov_trace_gep(uintptr_t Idx)647*a7c257b0Skamil void __sanitizer_cov_trace_gep(uintptr_t Idx) {
648*a7c257b0Skamil uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
649*a7c257b0Skamil fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
650*a7c257b0Skamil }
651*a7c257b0Skamil
652*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_memcmp(void * caller_pc,const void * s1,const void * s2,size_t n,int result)653*a7c257b0Skamil void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
654*a7c257b0Skamil const void *s2, size_t n, int result) {
655*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
656*a7c257b0Skamil if (result == 0) return; // No reason to mutate.
657*a7c257b0Skamil if (n <= 1) return; // Not interesting.
658*a7c257b0Skamil fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
659*a7c257b0Skamil }
660*a7c257b0Skamil
661*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_strncmp(void * caller_pc,const char * s1,const char * s2,size_t n,int result)662*a7c257b0Skamil void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
663*a7c257b0Skamil const char *s2, size_t n, int result) {
664*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
665*a7c257b0Skamil if (result == 0) return; // No reason to mutate.
666*a7c257b0Skamil size_t Len1 = fuzzer::InternalStrnlen(s1, n);
667*a7c257b0Skamil size_t Len2 = fuzzer::InternalStrnlen(s2, n);
668*a7c257b0Skamil n = std::min(n, Len1);
669*a7c257b0Skamil n = std::min(n, Len2);
670*a7c257b0Skamil if (n <= 1) return; // Not interesting.
671*a7c257b0Skamil fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
672*a7c257b0Skamil }
673*a7c257b0Skamil
674*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_strcmp(void * caller_pc,const char * s1,const char * s2,int result)675*a7c257b0Skamil void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
676*a7c257b0Skamil const char *s2, int result) {
677*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
678*a7c257b0Skamil if (result == 0) return; // No reason to mutate.
679*a7c257b0Skamil size_t N = fuzzer::InternalStrnlen2(s1, s2);
680*a7c257b0Skamil if (N <= 1) return; // Not interesting.
681*a7c257b0Skamil fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
682*a7c257b0Skamil }
683*a7c257b0Skamil
684*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_strncasecmp(void * called_pc,const char * s1,const char * s2,size_t n,int result)685*a7c257b0Skamil void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
686*a7c257b0Skamil const char *s2, size_t n, int result) {
687*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
688*a7c257b0Skamil return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
689*a7c257b0Skamil }
690*a7c257b0Skamil
691*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_strcasecmp(void * called_pc,const char * s1,const char * s2,int result)692*a7c257b0Skamil void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
693*a7c257b0Skamil const char *s2, int result) {
694*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
695*a7c257b0Skamil return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
696*a7c257b0Skamil }
697*a7c257b0Skamil
698*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_strstr(void * called_pc,const char * s1,const char * s2,char * result)699*a7c257b0Skamil void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
700*a7c257b0Skamil const char *s2, char *result) {
701*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
702*a7c257b0Skamil fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
703*a7c257b0Skamil }
704*a7c257b0Skamil
705*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_strcasestr(void * called_pc,const char * s1,const char * s2,char * result)706*a7c257b0Skamil void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
707*a7c257b0Skamil const char *s2, char *result) {
708*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
709*a7c257b0Skamil fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
710*a7c257b0Skamil }
711*a7c257b0Skamil
712*a7c257b0Skamil ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
__sanitizer_weak_hook_memmem(void * called_pc,const void * s1,size_t len1,const void * s2,size_t len2,void * result)713*a7c257b0Skamil void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
714*a7c257b0Skamil const void *s2, size_t len2, void *result) {
715*a7c257b0Skamil if (!fuzzer::RunningUserCallback) return;
716*a7c257b0Skamil fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
717*a7c257b0Skamil }
718*a7c257b0Skamil } // extern "C"
719