//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTE_INTERFACE __attribute__((tls_model("initial-exec")))
thread_local uintptr_t __sancov_lowest_stack;

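// Illustration (not part of this file): with -fsanitize-coverage=stack-depth
// the compiler is expected to emit, at each function entry, code roughly
// equivalent to
//   uintptr_t Frame = reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
//   if (Frame < __sancov_lowest_stack) __sancov_lowest_stack = Frame;
// so the thread-local variable above ends up holding the lowest (deepest)
// stack address reached while running one input; RecordInitialStack() and
// GetMaxStackOffset() below turn that into a stack-depth value.
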
namespace fuzzer {

TracePC TPC;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}

template<class CallBack>
void TracePC::IterateInline8bitCounters(CallBack CB) const {
  if (NumInline8bitCounters && NumInline8bitCounters == NumPCsInPCTables) {
    size_t CounterIdx = 0;
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      uint8_t *Beg = ModuleCounters[i].Start;
      size_t Size = ModuleCounters[i].Stop - Beg;
      assert(Size == (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
      for (size_t j = 0; j < Size; j++, CounterIdx++)
        CB(i, j, CounterIdx);
    }
  }
}

// Initializes unstable counters by copying Inline8bitCounters to unstable
// counters.
void TracePC::InitializeUnstableCounters() {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (UnstableCounters[UnstableIdx] != kUnstableCounter)
      UnstableCounters[UnstableIdx] = ModuleCounters[i].Start[j];
  });
}

// Compares the current counters with counters from previous runs
// and records differences as unstable edges.
void TracePC::UpdateUnstableCounters() {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx])
      UnstableCounters[UnstableIdx] = kUnstableCounter;
  });
}

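// Note (summarizing the two routines above and PrintUnstableStats below):
// when unstable-edge detection is enabled, the driver is expected to run the
// same input more than once. InitializeUnstableCounters() snapshots the
// inline 8-bit counters after one run, UpdateUnstableCounters() marks any
// counter that differs on a later run as kUnstableCounter, and
// PrintUnstableStats() then reports the fraction of counters that stayed
// stable.
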
void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          "         You may want to reduce the size of the binary\n"
          "         for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}
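
// For example (illustrative addresses only): an indirect call with
// Caller == 0x401234 and Callee == 0x405678 yields
//   Idx = 0x234 | (0x678 << 12) == 0x678234,
// i.e. a 24-bit feature combining the low 12 bits of both PCs, which is then
// folded into the value-profile bit map by AddValueModPrime.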

void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", PC + 1);
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      IterateInline8bitCounters([&](int i, int j, int CounterIdx) {
        if (ModuleCounters[i].Start[j])
          Observe(ModulePCTable[i].Start[j]);
      });
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", CoveredFuncs[i] + 1);
    Printf("\n");
  }
}

inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetPreviousInstructionPc for full implementation.
  return PC - 1;
}

inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetPreviousInstructionPc for full implementation.
  return PC + 1;
}
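
// (The +/-1 adjustments above only need to land inside the neighboring
// instruction so that symbolization attributes the PC to the correct call
// site; on fixed-width ISAs sanitizer_common uses architecture-specific
// offsets instead, which is why the full implementation lives there.)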

static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = ""; // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      if (ObservedFuncs.count(FE->PC))
        CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}

void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
  assert(FocusFunction.first > NumModulesWithInline8bitCounters);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModulesWithInline8bitCounters; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PCTE.Start[I].PCFlags & 1)) continue; // not a function entry.
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunction = {M, I};
      return;
    }
  }
}

bool TracePC::ObservedFocusFunction() {
  size_t I = FocusFunction.first;
  size_t J = FocusFunction.second;
  if (I >= NumModulesWithInline8bitCounters)
    return false;
  auto &MC = ModuleCounters[I];
  size_t Size = MC.Stop - MC.Start;
  if (J >= Size)
    return false;
  return MC.Start[J] != 0;
}

void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t Line = std::stoul(LineStr);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("COVERED_FUNC: hits: %zd", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(), Line);
    for (auto PC: UncoveredPCs)
      Printf("  UNCOVERED_PC: %s\n",
             DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}

void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    Vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

void TracePC::PrintUnstableStats() {
  size_t count = 0;
  for (size_t i = 0; i < NumInline8bitCounters; i++)
    if (UnstableCounters[i] == kUnstableCounter)
      count++;
  Printf("stat::stability_rate: %.2f\n",
         100 - static_cast<float>(count * 100) / NumInline8bitCounters);
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is the XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.
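//
// Worked example (illustrative values only): for memcmp("foobar", "foobaz", 6)
// called from a PC whose low 12 bits are 0x123, the common-prefix length is
// I == 5, so AddValueForMemcmp() below records Idx = 0x123 | (5 << 12).
// For a comparison such as (x == 0x1000) reached with x == 0x1004,
// ArgXor == 4, the hamming distance is 1 + 1 == 2, and HandleCmp() records
// Idx = ((PC & 4095) + 1) * 2 when bit 0 of UseValueProfileMask is set.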

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0; // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  uint64_t ArgDistance = __builtin_popcountll(ArgXor) + 1; // [1,65]
  uintptr_t Idx = ((PC & 4095) + 1) * ArgDistance;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  // TODO: remove these flags and instead use all metrics at once.
  if (UseValueProfileMask & 1)
    ValueProfileMap.AddValue(Idx);
  if (UseValueProfileMask & 2)
    ValueProfileMap.AddValue(
        PC * 64 + (Arg1 == Arg2 ? 0 : __builtin_clzll(Arg1 - Arg2) + 1));
  if (UseValueProfileMask & 4) // alternative way to use the hamming distance
    ValueProfileMap.AddValue(PC * 64 + ArgDistance);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually not be zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack; // Stack grows down
}

} // namespace fuzzer

extern "C" {
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}
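
// Note (summarizing documented -fsanitize-coverage=trace-pc-guard behavior):
// the compiler gives every edge its own uint32_t guard and inserts a call
// roughly equivalent to
//   __sanitizer_cov_trace_pc_guard(&guard_for_this_edge);
// while a module constructor reports the whole guard array via
// __sanitizer_cov_trace_pc_guard_init. HandleInit() numbers the guards, so
// *Guard in the callback above indexes the global counter and PC arrays.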

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}
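
// For example (illustrative values only): for a switch over Val == 25 with
// case values {10, 20, 30}, the loop above stops at i == 2, the first case
// value greater than Val, with Token == 25 ^ 30 == 7, and the partial match
// is reported as HandleCmp(PC + 2, Token, 0).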

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return; // No reason to mutate.
  if (n <= 1) return; // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return; // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return; // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return; // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return; // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
} // extern "C"