//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
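// (The stack-depth instrumentation is expected to update this thread-local
// at function entry whenever the current stack frame is lower than the value
// recorded so far; RecordInitialStack() and GetMaxStackOffset() below turn it
// into an offset from the initial stack.)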
ATTRIBUTE_INTERFACE __attribute__((tls_model("initial-exec")))
thread_local uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

int ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}


void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          " You may want to reduce the size of the binary\n"
          " for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumClangCounters = ClangCountersEnd() - ClangCountersBegin())
    Printf("INFO: %zd Clang Coverage Counters\n", NumClangCounters);

  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

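// Records an indirect-call edge for value profiling by packing the low 12 bits
// of the caller and callee PCs into a single index.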
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

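// Walks whichever counters are in use (inline 8-bit counters, trace-pc-guard
// counters, or Clang counters), records newly covered PCs and functions, and
// prints NEW_PC / NEW_FUNC lines when requested.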
void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs)
      PrintPC("\tNEW_PC: %p %F %L\n", "\tNEW_PC: %p\n", PC + 1);
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (ObservedFuncs.insert(TE.PC).second && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
        uint8_t *Beg = ModuleCounters[i].Start;
        size_t Size = ModuleCounters[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++)
          if (Beg[j])
            Observe(ModulePCTable[i].Start[j]);
      }
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }
  if (size_t NumClangCounters =
          ClangCountersEnd() - ClangCountersBegin()) {
    auto P = ClangCountersBegin();
    for (size_t Idx = 0; Idx < NumClangCounters; Idx++)
      if (P[Idx])
        ObservePC((uintptr_t)Idx);
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N; i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i, CoveredFuncs.size());
    PrintPC("%p %F %L\n", "%p\n", CoveredFuncs[i] + 1);
  }
}

inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetPreviousInstructionPc for full implementation.
  return PC - 1;
}

inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetPreviousInstructionPc for full implementation.
  return PC + 1;
}

static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

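// Symbolizes every entry in the PC tables and prints COVERED, UNCOVERED_LINE,
// and UNCOVERED_FUNC reports for files accepted by IsInterestingCoverageFile.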
void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  std::string LastFunctionName = "";
  std::string LastFileStr = "";
  Set<size_t> UncoveredLines;
  Set<size_t> CoveredLines;

  auto FunctionEndCallback = [&](const std::string &CurrentFunc,
                                 const std::string &CurrentFile) {
    if (LastFunctionName != CurrentFunc) {
      if (CoveredLines.empty() && !UncoveredLines.empty()) {
        Printf("UNCOVERED_FUNC: %s\n", LastFunctionName.c_str());
      } else {
        for (auto Line : UncoveredLines) {
          if (!CoveredLines.count(Line))
            Printf("UNCOVERED_LINE: %s %s:%zd\n", LastFunctionName.c_str(),
                   LastFileStr.c_str(), Line);
        }
      }

      UncoveredLines.clear();
      CoveredLines.clear();
      LastFunctionName = CurrentFunc;
      LastFileStr = CurrentFile;
    }
  };

  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto Ptr = M.Start; Ptr < M.Stop; Ptr++) {
      auto PC = Ptr->PC;
      auto VisualizePC = GetNextInstructionPc(PC);
      bool IsObserved = ObservedPCs.count(PC);
      std::string FileStr = DescribePC("%s", VisualizePC);
      if (!IsInterestingCoverageFile(FileStr)) continue;
      std::string FunctionStr = DescribePC("%F", VisualizePC);
      FunctionEndCallback(FunctionStr, FileStr);
      std::string LineStr = DescribePC("%l", VisualizePC);
      size_t Line = std::stoul(LineStr);
      if (IsObserved && CoveredLines.insert(Line).second)
        Printf("COVERED: %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(),
               Line);
      else
        UncoveredLines.insert(Line);
    }
  }
  FunctionEndCallback("", "");
}

void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    Vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is the XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.
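// For example (illustrative values): comparing "libFuzzer" with "libFuzzEr"
// via memcmp gives a common-prefix length of 7, so the recorded index is
// (PC & 4095) | (7 << 12); comparing 0x12 with 0x10 gives ArgXor == 0x2
// (popcount 1), so the recorded index is ((PC & 4095) + 1) * 2.
// See AddValueForMemcmp and HandleCmp below.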

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  uint64_t ArgDistance = __builtin_popcountll(ArgXor) + 1;  // [1,65]
  uintptr_t Idx = ((PC & 4095) + 1) * ArgDistance;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  ValueProfileMap.AddValue(Idx);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually not be zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}

} // namespace fuzzer

extern "C" {
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

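// Cases[0] holds the number of case values, Cases[1] the bit width of the
// switch operand, and the case values themselves (in ascending order) start
// at Cases[2].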
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
} // extern "C"