//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTE_INTERFACE __attribute__((tls_model("initial-exec")))
thread_local uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

int ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

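// Returns the number of covered PCs: the exact ObservedPCs set when PC tables
// are in use, otherwise the number of non-zero entries in the PC array.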
size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}

// Initializes unstable counters by copying Inline8bitCounters to unstable
// counters.
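// Entries already marked with the kUnstableCounter sentinel stay marked;
// only still-stable entries are (re)initialized from the current counters.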
void TracePC::InitializeUnstableCounters() {
  if (NumInline8bitCounters && NumInline8bitCounters == NumPCsInPCTables) {
    size_t UnstableIdx = 0;
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      uint8_t *Beg = ModuleCounters[i].Start;
      size_t Size = ModuleCounters[i].Stop - Beg;
      assert(Size == (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
      for (size_t j = 0; j < Size; j++, UnstableIdx++)
        if (UnstableCounters[UnstableIdx] != kUnstableCounter)
          UnstableCounters[UnstableIdx] = Beg[j];
    }
  }
}

// Compares the current counters with counters from previous runs
// and records differences as unstable edges.
void TracePC::UpdateUnstableCounters() {
  if (NumInline8bitCounters && NumInline8bitCounters == NumPCsInPCTables) {
    size_t UnstableIdx = 0;
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      uint8_t *Beg = ModuleCounters[i].Start;
      size_t Size = ModuleCounters[i].Stop - Beg;
      assert(Size == (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
      for (size_t j = 0; j < Size; j++, UnstableIdx++)
        if (Beg[j] != UnstableCounters[UnstableIdx])
          UnstableCounters[UnstableIdx] = kUnstableCounter;
    }
  }
}

void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

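// Registers a module instrumented with trace-pc-guard: every guard in
// [Start, Stop) gets a distinct index (wrapping modulo kNumPCs) and the
// module range is recorded for later iteration.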
void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          " You may want to reduce the size of the binary\n"
          " for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

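// Value-profiles indirect calls: the low 12 bits of the caller and callee PCs
// are packed into a 24-bit index and added to the value profile map.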
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

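// Walks the inline 8-bit counters (or the trace-pc-guard counters) and records
// every PC whose counter is non-zero, printing new PCs/functions if requested.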
void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", PC + 1);
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (ObservedFuncs.insert(TE.PC).second && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
        uint8_t *Beg = ModuleCounters[i].Start;
        size_t Size = ModuleCounters[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++)
          if (Beg[j])
            Observe(ModulePCTable[i].Start[j]);
      }
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N; i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", CoveredFuncs[i] + 1);
    Printf("\n");
  }
}

inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetPreviousInstructionPc for full implementation.
  return PC - 1;
}

inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetPreviousInstructionPc for full implementation.
  return PC + 1;
}

static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = ""; // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

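// Calls CB(FirstEntry, OneAfterLastEntry) for every function observed as
// covered; function boundaries come from bit 0 of PCFlags (function entry).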
template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      if (ObservedFuncs.count(FE->PC))
        CB(FE, NextFE);
    }
  }
}

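// Finds FuncName among the function entries of all PC tables and remembers its
// (module, entry) position. DescribePC("%F", ...) may produce "in <name>", so
// a leading "in " is stripped before comparing.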
void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
  assert(FocusFunction.first > NumModulesWithInline8bitCounters);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModulesWithInline8bitCounters; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PCTE.Start[I].PCFlags & 1)) continue; // not a function entry.
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunction = {M, I};
      return;
    }
  }
}

bool TracePC::ObservedFocusFunction() {
  size_t I = FocusFunction.first;
  size_t J = FocusFunction.second;
  if (I >= NumModulesWithInline8bitCounters)
    return false;
  auto &MC = ModuleCounters[I];
  size_t Size = MC.Stop - MC.Start;
  if (J >= Size)
    return false;
  return MC.Start[J] != 0;
}

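// Prints per-function coverage via the symbolizer; requires both
// __sanitizer_symbolize_pc and __sanitizer_get_module_and_offset_for_pc.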
void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First, const PCTableEntry *Last) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr)) return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t Line = std::stoul(LineStr);
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("COVERED_FUNC: ");
    UncoveredPCs.empty()
        ? Printf("all")
        : Printf("%zd/%zd", (Last - First) - UncoveredPCs.size(), Last - First);
    Printf(" PCs covered %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(),
           Line);
    for (auto PC: UncoveredPCs) {
      Printf(" UNCOVERED_PC: %s\n",
             DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
    }
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}

void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    Vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

void TracePC::PrintUnstableStats() {
  size_t count = 0;
  for (size_t i = 0; i < NumInline8bitCounters; i++)
    if (UnstableCounters[i] == kUnstableCounter)
      count++;
  Printf("stat::stability_rate: %.2f\n",
         100 - static_cast<float>(count * 100) / NumInline8bitCounters);
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as a new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is a XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.

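// Records progress of a memcmp/strcmp-style comparison: the length I of the
// common prefix is packed with the low 12 bits of the caller PC into a
// value-profile index, and the compared words are inserted into TORCW.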
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0; // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

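// Worked example (illustration only): for Arg1 = 0xFF and Arg2 = 0x0F,
// ArgXor = 0xF0, ArgDistance = popcount(0xF0) + 1 = 5, and
// Idx = ((PC & 4095) + 1) * 5; fewer differing bits yield a smaller Idx
// for the same call site.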
template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  uint64_t ArgDistance = __builtin_popcountll(ArgXor) + 1; // [1,65]
  uintptr_t Idx = ((PC & 4095) + 1) * ArgDistance;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  // TODO: remove these flags and instead use all metrics at once.
  if (UseValueProfileMask & 1)
    ValueProfileMap.AddValue(Idx);
  if (UseValueProfileMask & 2)
    ValueProfileMap.AddValue(
        PC * 64 + (Arg1 == Arg2 ? 0 : __builtin_clzll(Arg1 - Arg2) + 1));
  if (UseValueProfileMask & 4) // alternative way to use the hamming distance
    ValueProfileMap.AddValue(PC * 64 + ArgDistance);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually not be zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

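// Stack-depth tracking (-fsanitize-coverage=stack-depth): instrumentation keeps
// __sancov_lowest_stack at the lowest stack pointer seen, and the offset from
// the initial stack recorded below gives the maximum observed stack depth.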
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack; // Stack grows down
}

} // namespace fuzzer

extern "C" {
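// Called on every instrumented edge under -fsanitize-coverage=trace-pc-guard;
// *Guard holds the index assigned by TracePC::HandleInit.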
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

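// For a switch on Val, XORs Val with the first case value greater than Val
// (case values are sorted, so Vals[N - 1] is the largest), or with the last
// case if none is greater, and reports the token via HandleCmp at PC + i.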
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

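// Weak hooks: the sanitizer runtime calls these from its memcmp/str*
// interceptors. The compared data is fed into the value profile and the
// tables of recent comparisons so the fuzzer can reuse it during mutation.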
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return; // No reason to mutate.
  if (n <= 1) return; // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return; // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return; // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return; // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return; // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"