//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerBuiltins.h"
#include "FuzzerBuiltinsMsvc.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

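// Returns the total PC coverage observed so far: ObservedPCs.size() when PCs
// have been recorded via UpdateObservedPCs(), otherwise the number of
// non-zero entries in the PCs() array.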
size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}


void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

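// Registers one module instrumented with trace-pc-guard. Each guard gets a
// sequential index; indices wrap modulo kNumPCs, so a binary with more than
// kNumPCs instrumented edges gets colliding counters (hence the warning below).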
void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          "         You may want to reduce the size of the binary\n"
          "         for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

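// Records an indirect call as a (Caller, Callee) pair: the low 12 bits of
// each PC are combined into a single 24-bit index that is fed into the
// value profile map.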
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

/// \return the address of the previous instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
#if defined(__arm__)
  // T32 (Thumb) branch instructions might be 16 or 32 bit long,
  // so we return (pc-2) in that case in order to be safe.
  // For A32 mode we return (pc-4) because all instructions are 32 bit long.
  return (PC - 3) & (~1);
#elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
  // PCs are always 4 byte aligned.
  return PC - 4;
#elif defined(__sparc__) || defined(__mips__)
  return PC - 8;
#else
  return PC - 1;
#endif
}

/// \return the address of the next instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
#if defined(__mips__)
  return PC + 8;
#elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
    defined(__aarch64__)
  return PC + 4;
#else
  return PC + 1;
#endif
}

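// Walks the PC tables in parallel with either the inline 8-bit counters or
// the trace-pc-guard counters, records every PC whose counter is non-zero,
// and prints new PCs and newly covered functions when requested.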
void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", GetNextInstructionPc(PC));
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
        uint8_t *Beg = ModuleCounters[i].Start;
        size_t Size = ModuleCounters[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++)
          if (Beg[j])
            Observe(ModulePCTable[i].Start[j]);
      }
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
    Printf("\n");
  }
}


static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      if (ObservedFuncs.count(FE->PC))
        CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}

void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
  assert(FocusFunction.first > NumModulesWithInline8bitCounters);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModulesWithInline8bitCounters; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PCTE.Start[I].PCFlags & 1)) continue;  // not a function entry.
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunction = {M, I};
      return;
    }
  }
}

bool TracePC::ObservedFocusFunction() {
  size_t I = FocusFunction.first;
  size_t J = FocusFunction.second;
  if (I >= NumModulesWithInline8bitCounters)
    return false;
  auto &MC = ModuleCounters[I];
  size_t Size = MC.Stop - MC.Start;
  if (J >= Size)
    return false;
  return MC.Start[J] != 0;
}

void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t Line = std::stoul(LineStr);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("COVERED_FUNC: hits: %zd", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(), Line);
    for (auto PC: UncoveredPCs)
      Printf("  UNCOVERED_PC: %s\n",
             DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}

void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    Vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as a new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is a XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.
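//
// As an illustrative sketch (the concrete values here are invented for
// exposition): for a comparison of Arg1 = 0x12 with Arg2 = 0x10, HandleCmp
// below computes ArgXor = 0x02, so HammingDistance = popcount(0x02) = 1 and
// AbsoluteDistance = clz64(0x12 - 0x10) + 1 = 63; the features added to
// ValueProfileMap are then PC * 128 + 1 and PC * 128 + 64 + 63.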

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  uint64_t HammingDistance = Popcountll(ArgXor);  // [0,64]
  uint64_t AbsoluteDistance = (Arg1 == Arg2 ? 0 : Clzll(Arg1 - Arg2) + 1);
  ValueProfileMap.AddValue(PC * 128 + HammingDistance);
  ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually be non-zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}

} // namespace fuzzer

extern "C" {
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

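// Handles LLVM's switch instrumentation: Cases[0] holds the number of case
// values, Cases[1] their bit width, and Cases[2..] the case values themselves
// (the code below assumes they are sorted in ascending order). The switch is
// reported to HandleCmp as a comparison between Val and the first case value
// greater than Val (or the last case value if Val exceeds them all).
//
// Illustrative sketch (values invented for exposition): for Val = 7 and case
// values {1, 5, 10}, the loop below stops at i = 2 with Token = 7 ^ 10, and
// HandleCmp(PC + 2, Token, 0) is called.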
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"